# Dependencies ----
# Use library() rather than require() for mandatory packages: require()
# merely returns FALSE when a package is missing, letting the script fail
# later with a confusing error; library() stops immediately.
library(jsonlite)
library(httr)
library(data.table)
#' Obtain an API token from the submission service.
#'
#' @param username,password Account credentials.
#' @param url_site Base URL of the service (no trailing slash).
#' @return The token string on success; 0 on wrong credentials (kept for
#'   backward compatibility with existing callers). Stops on any other
#'   unexpected HTTP status.
get_token <- function(username, password, url_site){
  
  post_body = list(username=username,password=password)
  post_url_string = paste0(url_site,'/token/')
  result = POST(post_url_string, body = post_body)
  
  # error handling (wrong credentials)
  if(result$status_code==400){
    print('Check your credentials')
    return(0)
  }
  else if (result$status_code==201){
    output = content(result)
    return(output$key)
  }
  
  # BUG FIX: the original fell through here and returned an undefined
  # `token` ("object 'token' not found") for any other status code.
  stop(paste('Unexpected status code from /token/:', result$status_code),
       call. = FALSE)
}

#' Download the product dataset from the service.
#'
#' @param start_date First event date to request (ISO string).
#' @param token API token from get_token(); sent in the Authorization header.
#' @param url_site Base URL of the service.
#' @return A data.table ordered by product_content_id then event_date, with
#'   event_date parsed as Date.
get_data <- function(start_date='2020-03-20', token, url_site){
  
  # BUG FIX: the original put global `username`/`password` (free variables
  # not in this function's signature) in the request body, which errors if
  # those globals are absent. Authentication is via the token header, so
  # only start_date is sent here.
  post_body = list(start_date=start_date)
  post_url_string = paste0(url_site,'/dataset/')
  
  header = add_headers(c(Authorization=paste('Token',token,sep=' ')))
  result = GET(post_url_string, header, body = post_body)
  output = content(result)
  data = data.table::rbindlist(output)
  data[,event_date:=as.Date(event_date)]
  data = data[order(product_content_id,event_date)]
  return(data)
}


#' Validate and (optionally) POST predictions to the submission endpoint.
#'
#' @param predictions data.frame/data.table with columns
#'   'product_content_id' and numeric 'forecast'.
#' @param token API token, sent in the Authorization header.
#' @param url_site Base URL of the service.
#' @param submit_now If FALSE (default), only prints the JSON body that
#'   would be sent and returns FALSE without contacting the server.
#' @return FALSE when the format is wrong or submit_now is FALSE; otherwise
#'   invisibly the printed server response content.
send_submission <- function(predictions, token, url_site, submit_now=FALSE){
  
  format_check=check_format(predictions)
  if(!format_check){
    return(FALSE)
  }
  
  # BUG FIX / idiom: the original built the string "list('id'=value, ...)"
  # in a loop and ran eval(parse(text=...)) — fragile (breaks on 0 rows,
  # on quotes in ids) and unsafe. Build the named list directly instead.
  submission = as.list(setNames(predictions$forecast,
                                as.character(predictions$product_content_id)))
  json_body = jsonlite::toJSON(submission, auto_unbox = TRUE)
  submission=list(submission=json_body)
  
  print(submission)
  # {"31515569":2.4,"32939029":2.4,"4066298":2.4,"6676673":2.4,"7061886":2.4,"85004":2.4} 
  
  if(!submit_now){
    print("You did not submit.")
    return(FALSE)      
  }
  
  
  header = add_headers(c(Authorization=paste('Token',token,sep=' ')))
  post_url_string = paste0(url_site,'/submission/')
  result = POST(post_url_string, header, body=submission)
  
  if (result$status_code==201){
    print("Successfully submitted. Below you can see the details of your submission")
  } else {
    print("Could not submit. Please check the error message below, contact the assistant if needed.")
  }
  
  print(content(result))
  
}

#' Check that a predictions table has the expected shape.
#'
#' Accepts a data.frame or data.table with columns 'product_content_id'
#' and a numeric 'forecast'. Prints a diagnostic message and returns
#' TRUE/FALSE.
check_format <- function(predictions){
  
  # BUG FIX: the original tested `is.data.frame(x) | is.data.frame(x)` —
  # the same condition twice (the second was presumably meant to be a
  # data.table check). A data.table inherits from data.frame, so a single
  # scalar check suffices. Guard clauses replace the original nesting.
  if(!is.data.frame(predictions)){
    print("Wrong format. Please provide data.frame or data.table object")
    return(FALSE)
  }
  
  if(!all(c('product_content_id','forecast') %in% names(predictions))){
    print("Wrong column names. Please provide 'product_content_id' and 'forecast' columns")
    return(FALSE)
  }
  
  if(!is.numeric(predictions$forecast)){
    print("forecast information is not numeric")
    return(FALSE)
  }
  
  print("Format OK")
  return(TRUE)
}
# Script configuration ----
subm_url = 'http://167.172.183.67'

# SECURITY NOTE(review): credentials are hard-coded in the source. Move
# them to environment variables (Sys.getenv) before sharing this file.
u_name = "Group17"
p_word = "MuH3NvpH8z8G2Z4l"
submit_now = FALSE

# Globals read elsewhere in the script; keep both names defined.
username = u_name
password = p_word

# Use the full argument name `url_site`: the original relied on partial
# argument matching via `url=`, which breaks silently if the signature
# ever gains another argument starting with "url".
token = get_token(username=u_name, password=p_word, url_site=subm_url)
data = get_data(token=token, url_site=subm_url)

library("modeest")
## Registered S3 method overwritten by 'rmutil':
##   method         from
##   print.response httr
library("forecast")
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
## Registered S3 method overwritten by 'forecast':
##   method          from  
##   predict.default statip
## 
## Attaching package: 'forecast'
## The following object is masked from 'package:modeest':
## 
##     naive
library("ggfortify")
## Loading required package: ggplot2
## Registered S3 methods overwritten by 'ggfortify':
##   method                 from    
##   autoplot.Arima         forecast
##   autoplot.acf           forecast
##   autoplot.ar            forecast
##   autoplot.bats          forecast
##   autoplot.decomposed.ts forecast
##   autoplot.ets           forecast
##   autoplot.forecast      forecast
##   autoplot.stl           forecast
##   autoplot.ts            forecast
##   fitted.ar              forecast
##   fortify.ts             forecast
##   residuals.ar           forecast
library("ggplot2")
library("GGally")
## Registered S3 method overwritten by 'GGally':
##   method from   
##   +.gg   ggplot2
## Per-product daily series -------------------------------------------------
## For each product id, pull its rows out of `data` and turn the sold_count
## column into a ts object for forecasting. Grouped per product (subset +
## series together) rather than in two separate passes.
la_roche        <- data[product_content_id==85004]
ts_la_roche     <- ts(la_roche$sold_count)

sleepy          <- data[product_content_id==4066298]
ts_sleepy       <- ts(sleepy$sold_count)

xiaomi_kulaklik <- data[product_content_id==6676673]
ts_xiaomi       <- ts(xiaomi_kulaklik$sold_count)

fakir_supurge   <- data[product_content_id==7061886]
ts_fakir        <- ts(fakir_supurge$sold_count)

trend_tayt      <- data[product_content_id==31515569]
ts_trend_tayt   <- ts(trend_tayt$sold_count)

oralb           <- data[product_content_id==32939029]
ts_oralb        <- ts(oralb$sold_count)

trend_bikini    <- data[product_content_id==5926527]
ts_trend_bikini <- ts(trend_bikini$sold_count)

koton_mont      <- data[product_content_id==3904356]
ts_koton        <- ts(koton_mont$sold_count)

## Visual inspection of each raw series
autoplot(ts_la_roche)

autoplot(ts_sleepy)

autoplot(ts_xiaomi)

autoplot(ts_fakir)

autoplot(ts_trend_tayt)

autoplot(ts_oralb)

autoplot(ts_trend_bikini)

autoplot(ts_koton)

## ==========================================================================
## Fakir vacuum cleaner (id 7061886): compare forecasting models fit on
## observations 300-412 and evaluated on a 20-step test window (413-432).
## The interleaved `##` lines are recorded knitr output, kept as a log.
## ==========================================================================
####Fakir supurge
##Building test and train sets
ts_fakir_train<-window(ts_fakir,start=300,end=412)
ts_fakir_test<-window(ts_fakir,start=413,end=432)

##holt
# Holt's linear trend method, 20-step horizon
fakir_holt<-holt(ts_fakir_train,20)
summary(fakir_holt)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = ts_fakir_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.918 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 22.5606 
##     b = 0.2232 
## 
##   sigma:  21.6685
## 
##      AIC     AICc      BIC 
## 1235.266 1235.827 1248.903 
## 
## Error measures:
##                       ME     RMSE      MAE       MPE   MAPE      MASE
## Training set -0.09897731 21.28149 15.13405 -15.52737 41.128 0.9837569
##                     ACF1
## Training set 0.008463414
## 
## Forecasts:
##     Point Forecast       Lo 80     Hi 80       Lo 95     Hi 95
## 413       37.97499  10.2057424  65.74423   -4.494408  80.44438
## 414       38.19711   0.4996482  75.89457  -19.456182  95.85040
## 415       38.41923  -7.0912933  83.92975  -31.183108 108.02157
## 416       38.64135 -13.5262496  90.80895  -41.142108 118.42481
## 417       38.86347 -19.2041697  96.93111  -49.943321 127.67026
## 418       39.08559 -24.3366834 102.50786  -57.910406 136.08158
## 419       39.30771 -29.0520764 107.66750  -65.239561 143.85498
## 420       39.52983 -33.4350773 112.49474  -72.060366 151.12003
## 421       39.75195 -37.5451179 117.04902  -78.463714 157.96761
## 422       39.97407 -41.4257912 121.37393  -84.516275 164.46442
## 423       40.19619 -45.1101980 125.50258  -90.268672 170.66105
## 424       40.41831 -48.6241743 129.46080  -95.760418 176.59704
## 425       40.64043 -51.9883448 133.26921 -101.023057 182.30392
## 426       40.86255 -55.2194858 136.94459 -106.082244 187.80735
## 427       41.08467 -58.3314609 140.50080 -110.959182 193.12852
## 428       41.30679 -61.3358831 143.94947 -115.671633 198.28522
## 429       41.52891 -64.2425957 147.30042 -120.234650 203.29247
## 430       41.75103 -67.0600280 150.56209 -124.661124 208.16319
## 431       41.97315 -69.7954641 153.74177 -128.962196 212.90850
## 432       42.19527 -72.4552499 156.84579 -133.147570 217.53811
autoplot(fakir_holt)

# Out-of-sample accuracy on the held-out test window
accuracy(fakir_holt,ts_fakir_test)
##                       ME     RMSE      MAE        MPE     MAPE      MASE
## Training set -0.09897731 21.28149 15.13405 -15.527366 41.12800 0.9837569
## Test set     18.76486977 43.01421 26.74107   7.868026 35.82515 1.7382474
##                     ACF1 Theil's U
## Training set 0.008463414        NA
## Test set     0.728084568  1.087074
# Ljung-Box p ~ 0.006: residuals show remaining autocorrelation
checkresiduals(fakir_holt)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt's method
## Q* = 18.236, df = 6, p-value = 0.005668
## 
## Model df: 4.   Total lags used: 10
##ses
# Simple exponential smoothing (no trend)
fakir_ses<-ses(ts_fakir_train,20)
summary(fakir_ses)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = ts_fakir_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.9191 
## 
##   Initial states:
##     l = 28.817 
## 
##   sigma:  21.4632
## 
##      AIC     AICc      BIC 
## 1231.169 1231.390 1239.352 
## 
## Error measures:
##                     ME     RMSE      MAE       MPE    MAPE      MASE
## Training set 0.0858888 21.27238 15.07392 -14.99559 40.7554 0.9798483
##                     ACF1
## Training set 0.007697116
## 
## Forecasts:
##     Point Forecast       Lo 80     Hi 80       Lo 95     Hi 95
## 413       37.73709  10.2309330  65.24325   -4.329947  79.80412
## 414       37.73709   0.3781179  75.09606  -19.398527  94.87271
## 415       37.73709  -7.3716332  82.84581  -31.250748 106.72493
## 416       37.73709 -13.9726869  89.44687  -41.346189 116.82037
## 417       37.73709 -19.8216204  95.29580  -50.291361 125.76554
## 418       37.73709 -25.1287135 100.60289  -58.407859 133.88204
## 419       37.73709 -30.0214029 105.49558  -65.890582 141.36476
## 420       37.73709 -34.5838438 110.05802  -72.868233 148.34241
## 421       37.73709 -38.8750604 114.34924  -79.431083 154.90526
## 422       37.73709 -42.9383443 118.41252  -85.645339 161.11952
## 423       37.73709 -46.8065662 122.28074  -91.561274 167.03545
## 424       37.73709 -50.5053823 125.97956  -97.218125 172.69230
## 425       37.73709 -54.0552736 129.52945 -102.647215 178.12139
## 426       37.73709 -57.4728996 132.94708 -107.874023 183.34820
## 427       37.73709 -60.7720275 136.24621 -112.919604 188.39378
## 428       37.73709 -63.9641900 139.43837 -117.801595 193.27577
## 429       37.73709 -67.0591621 142.53334 -122.534946 198.00912
## 430       37.73709 -70.0653152 145.53949 -127.132461 202.60664
## 431       37.73709 -72.9898835 148.46406 -131.605202 207.07938
## 432       37.73709 -75.8391698 151.31335 -135.962809 211.43699
autoplot(fakir_ses)+autolayer(fitted(fakir_ses))

accuracy(fakir_ses,ts_fakir_test)
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set  0.0858888 21.27238 15.07392 -14.99559 40.75540 0.9798483
## Test set     21.1129108 43.26295 26.55516  14.23702 33.64059 1.7261627
##                     ACF1 Theil's U
## Training set 0.007697116        NA
## Test set     0.715884135  1.079397
##naive
# Naive benchmark: last observed value carried forward
fakir_naive<-naive(ts_fakir_train,20)
summary(fakir_naive)
## 
## Forecast method: Naive method
## 
## Model Information:
## Call: naive(y = ts_fakir_train, h = 20) 
## 
## Residual sd: 21.5198 
## 
## Error measures:
##                      ME     RMSE      MAE       MPE     MAPE MASE        ACF1
## Training set 0.08035714 21.42366 15.38393 -14.25105 40.98362    1 -0.06574646
## 
## Forecasts:
##     Point Forecast       Lo 80     Hi 80       Lo 95     Hi 95
## 413             38  10.5444748  65.45553   -3.989602  79.98960
## 414             38  -0.8279761  76.82798  -21.382265  97.38227
## 415             38  -9.5543646  85.55436  -34.728125 110.72812
## 416             38 -16.9110504  92.91105  -45.979205 121.97920
## 417             38 -23.3924207  99.39242  -55.891605 131.89161
## 418             38 -29.2520274 105.25203  -64.853100 140.85310
## 419             38 -34.6404918 110.64049  -73.094045 149.09405
## 420             38 -39.6559522 115.65595  -80.764530 156.76453
## 421             38 -44.3665756 120.36658  -87.968807 163.96881
## 422             38 -48.8219940 124.82199  -94.782781 170.78278
## 423             38 -53.0596755 129.05968 -101.263756 177.26376
## 424             38 -57.1087292 133.10873 -107.456249 183.45625
## 425             38 -60.9923039 136.99230 -113.395664 189.39566
## 426             38 -64.7291687 140.72917 -119.110706 195.11071
## 427             38 -68.3347919 144.33479 -124.625030 200.62503
## 428             38 -71.8221008 147.82210 -129.958409 205.95841
## 429             38 -75.2020304 151.20203 -135.127566 211.12757
## 430             38 -78.4839283 154.48393 -140.146795 216.14680
## 431             38 -81.6758598 157.67586 -145.028433 221.02843
## 432             38 -84.7848414 160.78484 -149.783210 225.78321
accuracy(fakir_naive,ts_fakir_test)
##                       ME     RMSE      MAE       MPE     MAPE     MASE
## Training set  0.08035714 21.42366 15.38393 -14.25105 40.98362 1.000000
## Test set     20.85000000 43.13525 26.45000  13.63951 33.59629 1.719327
##                     ACF1 Theil's U
## Training set -0.06574646        NA
## Test set      0.71588414  1.076731
##mean
# Mean benchmark: training-set average as flat forecast
fakir_mean<-meanf(ts_fakir_train,20)
summary(fakir_mean)
## 
## Forecast method: Mean
## 
## Model Information:
## $mu
## [1] 54.28319
## 
## $mu.se
## [1] 3.257794
## 
## $sd
## [1] 34.63083
## 
## $bootstrap
## [1] FALSE
## 
## $call
## meanf(y = ts_fakir_train, h = 20)
## 
## attr(,"class")
## [1] "meanf"
## 
## Error measures:
##                         ME     RMSE      MAE       MPE    MAPE     MASE
## Training set -7.530542e-16 34.47725 28.15647 -62.25836 90.4686 1.830252
##                   ACF1
## Training set 0.8052821
## 
## Forecasts:
##     Point Forecast   Lo 80    Hi 80    Lo 95    Hi 95
## 413       54.28319 9.44154 99.12483 -14.6363 123.2027
## 414       54.28319 9.44154 99.12483 -14.6363 123.2027
## 415       54.28319 9.44154 99.12483 -14.6363 123.2027
## 416       54.28319 9.44154 99.12483 -14.6363 123.2027
## 417       54.28319 9.44154 99.12483 -14.6363 123.2027
## 418       54.28319 9.44154 99.12483 -14.6363 123.2027
## 419       54.28319 9.44154 99.12483 -14.6363 123.2027
## 420       54.28319 9.44154 99.12483 -14.6363 123.2027
## 421       54.28319 9.44154 99.12483 -14.6363 123.2027
## 422       54.28319 9.44154 99.12483 -14.6363 123.2027
## 423       54.28319 9.44154 99.12483 -14.6363 123.2027
## 424       54.28319 9.44154 99.12483 -14.6363 123.2027
## 425       54.28319 9.44154 99.12483 -14.6363 123.2027
## 426       54.28319 9.44154 99.12483 -14.6363 123.2027
## 427       54.28319 9.44154 99.12483 -14.6363 123.2027
## 428       54.28319 9.44154 99.12483 -14.6363 123.2027
## 429       54.28319 9.44154 99.12483 -14.6363 123.2027
## 430       54.28319 9.44154 99.12483 -14.6363 123.2027
## 431       54.28319 9.44154 99.12483 -14.6363 123.2027
## 432       54.28319 9.44154 99.12483 -14.6363 123.2027
accuracy(fakir_mean,ts_fakir_test)
##                         ME     RMSE      MAE       MPE    MAPE     MASE
## Training set -7.530542e-16 34.47725 28.15647 -62.25836 90.4686 1.830252
## Test set      4.566814e+00 38.03660 28.26327 -23.36638 50.6316 1.837195
##                   ACF1 Theil's U
## Training set 0.8052821        NA
## Test set     0.7158841  1.143946
##ets
# Automatically selected exponential smoothing state-space model
fakir_ets<-ets(ts_fakir_train)
checkresiduals(fakir_ets)

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(M,A,N)
## Q* = 17.292, df = 6, p-value = 0.008268
## 
## Model df: 4.   Total lags used: 10
fakir_ets_fc<-forecast(fakir_ets,20)
autoplot(fakir_ets_fc)

summary(fakir_ets)
## ETS(M,A,N) 
## 
## Call:
##  ets(y = ts_fakir_train) 
## 
##   Smoothing parameters:
##     alpha = 0.7941 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 21.7497 
##     b = 4.9079 
## 
##   sigma:  0.4052
## 
##      AIC     AICc      BIC 
## 1228.164 1228.725 1241.801 
## 
## Training set error measures:
##                     ME     RMSE      MAE       MPE     MAPE     MASE      ACF1
## Training set -5.956631 22.22949 15.87115 -34.96477 48.83305 1.031671 0.1194258
accuracy(fakir_ets_fc,ts_fakir_test)
##                      ME     RMSE      MAE        MPE      MAPE     MASE
## Training set  -5.956631 22.22949 15.87115  -34.96477  48.83305 1.031671
## Test set     -30.375447 68.29923 60.47045 -125.28339 148.99613 3.930755
##                   ACF1 Theil's U
## Training set 0.1194258        NA
## Test set     0.8489647  3.314609
##arima
# Automatically selected ARIMA model
fakir_arima<-auto.arima(ts_fakir_train)
checkresiduals(fakir_arima)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(1,1,1)
## Q* = 13.782, df = 8, p-value = 0.08763
## 
## Model df: 2.   Total lags used: 10
fakir_arima_fc<-forecast(fakir_arima,20)
autoplot(fakir_arima_fc)

accuracy(fakir_arima_fc,ts_fakir_test)
##                     ME     RMSE      MAE       MPE     MAPE     MASE       ACF1
## Training set  0.325362 20.46018 14.52048 -21.10029 42.63872 0.943873 0.04844925
## Test set     21.141301 43.24128 26.54117  14.34107 33.59578 1.725253 0.71507735
##              Theil's U
## Training set        NA
## Test set      1.078552
##tbats
# TBATS model (trigonometric seasonality, Box-Cox, ARMA errors, trend)
fakir_tbats<-tbats(ts_fakir_train)
fakir_tbats_fc<-forecast(fakir_tbats,20)
autoplot(fakir_tbats_fc)

accuracy(fakir_tbats_fc,ts_fakir_test)
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set  0.4454263 21.28353 14.86734 -14.79454 40.51517 0.9664201
## Test set     21.5038192 43.45505 26.71153  15.12541 33.70647 1.7363268
##                    ACF1 Theil's U
## Training set 0.07074501        NA
## Test set     0.71588414  1.083569
##linear regression
autoplot(ts_fakir)

# Drop the constant product id column before using all remaining columns
# as regressors. NOTE: this mutates fakir_supurge in place.
fakir_supurge<-fakir_supurge[,-"product_content_id"]
train.fakir<-fakir_supurge[300:412,]
test.fakir<-fakir_supurge[413:432,]
# NOTE(review): test.fakir2 (first two test days) is not used in this
# chunk — presumably referenced further down the file; verify before removing.
test.fakir2<-fakir_supurge[413:414,]

# Linear regression of sold_count on all other columns
fakir_linear<-lm(sold_count~.,data=train.fakir)
fakir.predict<-predict(fakir_linear,test.fakir)
accuracy(fakir.predict,test.fakir$sold_count)
##                ME     RMSE      MAE      MPE     MAPE
## Test set 3.172859 17.00531 10.90204 2.794273 16.10993
plot(fakir.predict,type="l",xlab="Days",ylab="Fakir Vacuum")

##linear according to correlation
# Correlation heatmap used to pick the regressors below
ggcorr(train.fakir,label=TRUE)
## Warning in ggcorr(train.fakir, label = TRUE): data in column(s) 'event_date' are
## not numeric and were ignored

fakir_linear_corr<-lm(sold_count~basket_count+favored_count+visit_count+
                        category_visits,train.fakir)
fakir.predict.corr<-predict(fakir_linear_corr,test.fakir)
accuracy(fakir.predict.corr,test.fakir$sold_count)
##                ME     RMSE      MAE      MPE     MAPE
## Test set 6.747498 18.68313 12.20994 11.41426 17.01206
## ==========================================================================
## Xiaomi earbuds (id 6676673): same model-comparison procedure as the
## Fakir section — train on observations 300-412, test on 413-432.
## The interleaved `##` lines are recorded knitr output, kept as a log.
## ==========================================================================
####Xiaomi Kulaklik
##Building test and train sets
ts_xiaomi_train<-window(ts_xiaomi,start=300,end=412)
ts_xiaomi_test<-window(ts_xiaomi,start=413,end=432)

##holt
# Holt's linear trend method, 20-step horizon
xiaomi_holt<-holt(ts_xiaomi_train,20)
summary(xiaomi_holt)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = ts_xiaomi_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.7548 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 310.305 
##     b = -0.1538 
## 
##   sigma:  137.8548
## 
##      AIC     AICc      BIC 
## 1653.444 1654.005 1667.081 
## 
## Error measures:
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set 0.01421797 135.3929 84.31978 -9.701816 26.56268 0.9504645
##                      ACF1
## Training set -0.009278146
## 
## Forecasts:
##     Point Forecast      Lo 80    Hi 80      Lo 95     Hi 95
## 413       295.3343  118.66622 472.0024   25.14379  565.5248
## 414       295.1806   73.82390 516.5374  -43.35528  633.7166
## 415       295.0270   36.58798 553.4660 -100.22137  690.2753
## 416       294.8733    4.03429 585.7124 -149.92659  739.6732
## 417       294.7197  -25.26265 614.7020 -194.65106  784.0904
## 418       294.5660  -52.12512 641.2571 -235.65231  824.7843
## 419       294.4123  -77.07857 665.9033 -273.73399  862.5587
## 420       294.2587 -100.48299 689.0004 -309.44661  897.9640
## 421       294.1050 -122.59773 710.8078 -343.18685  931.3969
## 422       293.9514 -143.61703 731.5198 -375.25176  963.1545
## 423       293.7977 -163.69081 751.2862 -405.87060  993.4660
## 424       293.6441 -182.93763 770.2258 -435.22474 1022.5129
## 425       293.4904 -201.45321 788.4340 -463.46054 1050.4413
## 426       293.3367 -219.31614 805.9896 -490.69818 1077.3717
## 427       293.1831 -236.59186 822.9580 -517.03778 1103.4040
## 428       293.0294 -253.33559 839.3944 -542.56376 1128.6226
## 429       292.8758 -269.59440 855.3459 -567.34812 1153.0997
## 430       292.7221 -285.40882 870.8530 -591.45284 1176.8971
## 431       292.5685 -300.81403 885.9509 -614.93174 1200.0687
## 432       292.4148 -315.84083 900.6704 -637.83190 1222.6615
autoplot(xiaomi_holt)

accuracy(xiaomi_holt,ts_xiaomi_test)
##                       ME     RMSE      MAE       MPE     MAPE      MASE
## Training set  0.01421797 135.3929 84.31978 -9.701816 26.56268 0.9504645
## Test set     45.87545350 128.2069 87.73169  4.793077 22.99076 0.9889241
##                      ACF1 Theil's U
## Training set -0.009278146        NA
## Test set      0.370646079  1.007564
# Ljung-Box p ~ 0.26: no significant residual autocorrelation here
checkresiduals(xiaomi_holt)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt's method
## Q* = 7.654, df = 6, p-value = 0.2646
## 
## Model df: 4.   Total lags used: 10
##ses
# Simple exponential smoothing (no trend)
xiaomi_ses<-ses(ts_xiaomi_train,20)
summary(xiaomi_ses)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = ts_xiaomi_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.7547 
## 
##   Initial states:
##     l = 312.9025 
## 
##   sigma:  136.5981
## 
##      AIC     AICc      BIC 
## 1649.429 1649.649 1657.611 
## 
## Error measures:
##                      ME     RMSE      MAE      MPE     MAPE      MASE
## Training set -0.2036101 135.3839 84.33135 -9.77368 26.57569 0.9505949
##                      ACF1
## Training set -0.009301759
## 
## Forecasts:
##     Point Forecast       Lo 80    Hi 80      Lo 95     Hi 95
## 413       295.5373  120.479853 470.5948   27.81002  563.2647
## 414       295.5373   76.215865 514.8588  -39.88591  630.9606
## 415       295.5373   39.493018 551.5817  -96.04866  687.1233
## 416       295.5373    7.413268 583.6614 -145.11040  736.1851
## 417       295.5373  -21.436265 612.5109 -189.23196  780.3066
## 418       295.5373  -47.870656 638.9453 -229.65987  820.7345
## 419       295.5373  -72.410805 663.4855 -267.19079  858.2655
## 420       295.5373  -95.413582 686.4883 -302.37050  893.4452
## 421       295.5373 -117.136149 708.2108 -335.59230  926.6670
## 422       295.5373 -137.771089 728.8458 -367.15071  958.2254
## 423       295.5373 -157.467053 748.5417 -397.27309  988.3478
## 424       295.5373 -176.341634 767.4163 -426.13927 1017.2140
## 425       295.5373 -194.489752 785.5644 -453.89443 1044.9691
## 426       295.5373 -211.989346 803.0640 -480.65774 1071.7324
## 427       295.5373 -228.905338 819.9800 -506.52852 1097.6032
## 428       295.5373 -245.292493 836.3672 -531.59051 1122.6652
## 429       295.5373 -261.197511 852.2722 -555.91514 1146.9898
## 430       295.5373 -276.660597 867.7353 -579.56389 1170.6386
## 431       295.5373 -291.716664 882.7913 -602.59015 1193.6648
## 432       295.5373 -306.396253 897.4709 -625.04065 1216.1153
autoplot(xiaomi_ses)+autolayer(fitted(xiaomi_ses))

accuracy(xiaomi_ses,ts_xiaomi_test)
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set -0.2036101 135.3839 84.33135 -9.773680 26.57569 0.9505949
## Test set     44.2126612 127.7934 87.49627  4.250728 23.02930 0.9862703
##                      ACF1 Theil's U
## Training set -0.009301759        NA
## Test set      0.372875096  1.005521
##naive
# Naive benchmark: last observed value carried forward
xiaomi_naive<-naive(ts_xiaomi_train,20)
summary(xiaomi_naive)
## 
## Forecast method: Naive method
## 
## Model Information:
## Call: naive(y = ts_xiaomi_train, h = 20) 
## 
## Residual sd: 140.9433 
## 
## Error measures:
##              ME     RMSE      MAE       MPE     MAPE MASE       ACF1
## Training set  0 140.3127 88.71429 -9.091318 27.68069    1 -0.2562068
## 
## Forecasts:
##     Point Forecast       Lo 80     Hi 80      Lo 95     Hi 95
## 413            302  122.182090  481.8179   26.99224  577.0078
## 414            302   47.699072  556.3009  -86.91971  690.9197
## 415            302   -9.453757  613.4538 -174.32742  778.3274
## 416            302  -57.635821  661.6358 -248.01552  852.0155
## 417            302 -100.085071  704.0851 -312.93605  916.9361
## 418            302 -138.462127  742.4621 -371.62869  975.6287
## 419            302 -173.753472  777.7535 -425.60215 1029.6021
## 420            302 -206.601855  810.6019 -475.83941 1079.8394
## 421            302 -237.453731  841.4537 -523.02329 1127.0233
## 422            302 -266.634161  870.6342 -567.65090 1171.6509
## 423            302 -294.388540  898.3885 -610.09756 1214.0976
## 424            302 -320.907514  924.9075 -650.65483 1254.6548
## 425            302 -346.342696  950.3427 -689.55459 1293.5546
## 426            302 -370.817013  974.8170 -726.98483 1330.9848
## 427            302 -394.431773  998.4318 -763.10048 1367.1005
## 428            302 -417.271642 1021.2716 -798.03105 1402.0310
## 429            302 -439.408238 1043.4082 -831.88605 1435.8861
## 430            302 -460.902783 1064.9028 -864.75912 1468.7591
## 431            302 -481.808100 1085.8081 -896.73104 1500.7310
## 432            302 -502.170143 1106.1701 -927.87210 1531.8721
accuracy(xiaomi_naive,ts_xiaomi_test)
##                 ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set  0.00 140.3127 88.71429 -9.091318 27.68069 1.0000000 -0.2562068
## Test set     37.75 125.7038 87.15000  2.156931 23.41455 0.9823671  0.3728751
##              Theil's U
## Training set        NA
## Test set     0.9982734
##mean
# Mean benchmark: training-set average as flat forecast
xiaomi_mean<-meanf(ts_xiaomi_train,20)
summary(xiaomi_mean)
## 
## Forecast method: Mean
## 
## Model Information:
## $mu
## [1] 392.3982
## 
## $mu.se
## [1] 18.33721
## 
## $sd
## [1] 194.9272
## 
## $bootstrap
## [1] FALSE
## 
## $call
## meanf(y = ts_xiaomi_train, h = 20)
## 
## attr(,"class")
## [1] "meanf"
## 
## Error measures:
##                        ME     RMSE     MAE       MPE     MAPE     MASE
## Training set 2.315877e-14 194.0628 139.219 -24.36656 44.53376 1.569297
##                   ACF1
## Training set 0.7390088
## 
## Forecasts:
##     Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 413       392.3982 139.9972 644.7993 4.469764 780.3267
## 414       392.3982 139.9972 644.7993 4.469764 780.3267
## 415       392.3982 139.9972 644.7993 4.469764 780.3267
## 416       392.3982 139.9972 644.7993 4.469764 780.3267
## 417       392.3982 139.9972 644.7993 4.469764 780.3267
## 418       392.3982 139.9972 644.7993 4.469764 780.3267
## 419       392.3982 139.9972 644.7993 4.469764 780.3267
## 420       392.3982 139.9972 644.7993 4.469764 780.3267
## 421       392.3982 139.9972 644.7993 4.469764 780.3267
## 422       392.3982 139.9972 644.7993 4.469764 780.3267
## 423       392.3982 139.9972 644.7993 4.469764 780.3267
## 424       392.3982 139.9972 644.7993 4.469764 780.3267
## 425       392.3982 139.9972 644.7993 4.469764 780.3267
## 426       392.3982 139.9972 644.7993 4.469764 780.3267
## 427       392.3982 139.9972 644.7993 4.469764 780.3267
## 428       392.3982 139.9972 644.7993 4.469764 780.3267
## 429       392.3982 139.9972 644.7993 4.469764 780.3267
## 430       392.3982 139.9972 644.7993 4.469764 780.3267
## 431       392.3982 139.9972 644.7993 4.469764 780.3267
## 432       392.3982 139.9972 644.7993 4.469764 780.3267
accuracy(xiaomi_mean,ts_xiaomi_test)
##                         ME     RMSE      MAE       MPE     MAPE     MASE
## Training set  2.315877e-14 194.0628 139.2190 -24.36656 44.53376 1.569297
## Test set     -5.264823e+01 130.9512 107.2889 -27.13062 36.21218 1.209376
##                   ACF1 Theil's U
## Training set 0.7390088        NA
## Test set     0.3728751  1.214658
##ets
# Automatically selected exponential smoothing state-space model
xiaomi_ets<-ets(ts_xiaomi_train)
checkresiduals(xiaomi_ets)

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(M,N,N)
## Q* = 5.1313, df = 8, p-value = 0.7435
## 
## Model df: 2.   Total lags used: 10
xiaomi_ets_fc<-forecast(xiaomi_ets,20)
autoplot(xiaomi_ets_fc)

summary(xiaomi_ets)
## ETS(M,N,N) 
## 
## Call:
##  ets(y = ts_xiaomi_train) 
## 
##   Smoothing parameters:
##     alpha = 0.6367 
## 
##   Initial states:
##     l = 290.0437 
## 
##   sigma:  0.2807
## 
##      AIC     AICc      BIC 
## 1581.417 1581.637 1589.599 
## 
## Training set error measures:
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set 0.05933439 136.3989 84.37635 -10.22998 26.66883 0.9511022
##                   ACF1
## Training set 0.1142942
accuracy(xiaomi_ets_fc,ts_xiaomi_test)
##                       ME     RMSE      MAE        MPE     MAPE      MASE
## Training set  0.05933439 136.3989 84.37635 -10.229983 26.66883 0.9511022
## Test set     45.43735676 128.2222 87.61874   4.647509 22.97530 0.9876508
##                   ACF1 Theil's U
## Training set 0.1142942        NA
## Test set     0.3728751  1.007253
##arima
# Automatically selected ARIMA model
xiaomi_arima<-auto.arima(ts_xiaomi_train)
checkresiduals(xiaomi_arima)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(2,0,0) with non-zero mean
## Q* = 4.6486, df = 7, p-value = 0.7028
## 
## Model df: 3.   Total lags used: 10
xiaomi_arima_fc<-forecast(xiaomi_arima,20)
autoplot(xiaomi_arima_fc)

accuracy(xiaomi_arima_fc,ts_xiaomi_test)
##                       ME     RMSE       MAE       MPE     MAPE     MASE
## Training set   0.9314957 129.0497  83.05174 -12.51952 26.66762 0.936171
## Test set     -28.6751203 132.8158 100.49252 -19.91566 32.24042 1.132766
##                    ACF1 Theil's U
## Training set 0.02338572        NA
## Test set     0.46785742  1.183588
##tbats
# TBATS model (trigonometric seasonality, Box-Cox, ARMA errors, trend)

xiaomi_tbats<-tbats(ts_xiaomi_train)
xiaomi_tbats_fc<-forecast(xiaomi_tbats,20)
autoplot(xiaomi_tbats_fc)

accuracy(xiaomi_tbats_fc,ts_xiaomi_test)
##                    ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set  3.31667 135.9378 84.26349 -9.269571 26.51935 0.9498301 0.06279922
## Test set     49.75354 129.8145 88.05035  6.045883 22.78502 0.9925161 0.37287510
##              Theil's U
## Training set        NA
## Test set      1.014251
##linear regression
autoplot(ts_xiaomi)

# Drop the constant product id column before using all remaining columns
# as regressors. NOTE: this mutates xiaomi_kulaklik in place.
xiaomi_kulaklik<-xiaomi_kulaklik[,-"product_content_id"]
train.xiaomi<-xiaomi_kulaklik[300:412,]
test.xiaomi<-xiaomi_kulaklik[413:432,]
# Linear regression of sold_count on all other columns
xiaomi_linear<-lm(sold_count~.,data=train.xiaomi)
xiaomi.predict<-predict(xiaomi_linear,test.xiaomi)
accuracy(xiaomi.predict,test.xiaomi$sold_count)
##                 ME     RMSE      MAE       MPE   MAPE
## Test set -26.67183 42.09958 33.48992 -8.356194 10.061
plot(xiaomi.predict,type="l",xlab="Days",ylab="Xiaomi Earbuds")

##linear according to correlation
# Correlation heatmap used to pick the regressors below
ggcorr(train.xiaomi,label=TRUE)
## Warning in ggcorr(train.xiaomi, label = TRUE): data in column(s) 'event_date'
## are not numeric and were ignored

xiaomi_linear_corr<-lm(sold_count~basket_count+favored_count+visit_count
                       ,train.xiaomi)
xiaomi.predict.corr<-predict(xiaomi_linear_corr,test.xiaomi)
accuracy(xiaomi.predict.corr,test.xiaomi$sold_count)
##                ME     RMSE      MAE      MPE     MAPE
## Test set 27.29032 55.81373 39.44342 5.109304 10.28937
####Sleepy
##Building test and train sets
# Hold out the last 20 observations (413-432) as the test horizon; train on
# 300-412, matching the split used for the other products in this file.
ts_sleepy_train<-window(ts_sleepy,start=300,end=412)
ts_sleepy_test<-window(ts_sleepy,start=413,end=432)

##holt
# Holt's linear-trend exponential smoothing, 20-step-ahead forecast.
sleepy_holt<-holt(ts_sleepy_train,20)
summary(sleepy_holt)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = ts_sleepy_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.9985 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 151.5011 
##     b = 3.5023 
## 
##   sigma:  134.5753
## 
##      AIC     AICc      BIC 
## 1648.002 1648.563 1661.639 
## 
## Error measures:
##                     ME    RMSE      MAE       MPE     MAPE      MASE      ACF1
## Training set -3.127618 132.172 71.83294 -15.11699 37.67106 0.9984226 0.1837977
## 
## Forecasts:
##     Point Forecast      Lo 80     Hi 80      Lo 95     Hi 95
## 413       196.4345   23.96930  368.8996  -67.32824  460.1972
## 414       199.9015  -43.82940  443.6323 -172.85273  572.6557
## 415       203.3685  -95.07948  501.8164 -253.06828  659.8052
## 416       206.8355 -137.75641  551.4274 -320.17233  733.8433
## 417       210.3025 -174.95315  595.5581 -378.89515  799.5001
## 418       213.7695 -208.25679  635.7957 -431.66399  859.2029
## 419       217.2365 -238.61041  673.0833 -479.92117  914.3941
## 420       220.7035 -266.62859  708.0355 -524.60659  966.0135
## 421       224.1705 -292.73826  741.0792 -566.37321 1014.7141
## 422       227.6375 -317.25030  772.5252 -605.69646 1060.9714
## 423       231.1045 -340.39940  802.6083 -642.93528 1105.1442
## 424       234.5715 -362.36790  831.5108 -678.36852 1147.5114
## 425       238.0385 -383.30081  859.3777 -712.21796 1188.2949
## 426       241.5055 -403.31570  886.3266 -744.66340 1227.6743
## 427       244.9725 -422.50946  912.4544 -775.85304 1265.7980
## 428       248.4395 -440.96306  937.8420 -805.91071 1302.7896
## 429       251.9065 -458.74502  962.5579 -834.94118 1338.7541
## 430       255.3735 -475.91386  986.6608 -863.03399 1373.7809
## 431       258.8405 -492.52013 1010.2010 -890.26639 1407.9473
## 432       262.3075 -508.60775 1033.2227 -916.70561 1441.3205
autoplot(sleepy_holt)

# Out-of-sample evaluation against the held-out 20 observations.
accuracy(sleepy_holt,ts_sleepy_test)
##                      ME     RMSE       MAE       MPE     MAPE      MASE
## Training set  -3.127618 132.1720  71.83294 -15.11699 37.67106 0.9984226
## Test set     -24.120966 191.9864 146.43673 -60.50361 82.76518 2.0353579
##                   ACF1 Theil's U
## Training set 0.1837977        NA
## Test set     0.6699808  1.907687
# Ljung-Box on the residuals: the tiny p-value below indicates remaining
# autocorrelation, i.e. Holt does not capture all structure in this series.
checkresiduals(sleepy_holt)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt's method
## Q* = 39.777, df = 6, p-value = 5.039e-07
## 
## Model df: 4.   Total lags used: 10
##ses
# Simple exponential smoothing (level only, no trend), same 20-step horizon.
sleepy_ses<-ses(ts_sleepy_train,20)
summary(sleepy_ses)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = ts_sleepy_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.9999 
## 
##   Initial states:
##     l = 80.9111 
## 
##   sigma:  133.1116
## 
##      AIC     AICc      BIC 
## 1643.585 1643.806 1651.767 
## 
## Error measures:
##                     ME     RMSE      MAE       MPE    MAPE      MASE     ACF1
## Training set 0.9920141 131.9283 71.31219 -12.30396 36.5862 0.9911845 0.184576
## 
## Forecasts:
##     Point Forecast      Lo 80    Hi 80      Lo 95     Hi 95
## 413       192.9975   22.40815 363.5868  -67.89639  453.8914
## 414       192.9975  -48.24019 434.2352 -175.94371  561.9387
## 415       192.9975 -102.45220 488.4472 -258.85382  644.8488
## 416       192.9975 -148.15558 534.1506 -328.75111  714.7461
## 417       192.9975 -188.42133 574.4163 -390.33226  776.3273
## 418       192.9975 -224.82450 610.8195 -446.00611  832.0011
## 419       192.9975 -258.30077 644.2958 -497.20364  883.1986
## 420       192.9975 -289.45978 675.4548 -544.85723  930.8522
## 421       192.9975 -318.72501 704.7200 -589.61454  975.6095
## 422       192.9975 -346.40478 732.3998 -631.94710 1017.9421
## 423       192.9975 -372.73187 758.7269 -672.21091 1058.2059
## 424       192.9975 -397.88711 783.8821 -710.68253 1096.6775
## 425       192.9975 -422.01431 808.0093 -747.58189 1133.5769
## 426       192.9975 -445.23006 831.2251 -783.08733 1169.0823
## 427       192.9975 -467.63048 853.6255 -817.34580 1203.3408
## 428       192.9975 -489.29586 875.2909 -850.48014 1236.4751
## 429       192.9975 -510.29414 896.2891 -882.59424 1268.5892
## 430       192.9975 -530.68339 916.6784 -913.77690 1299.7719
## 431       192.9975 -550.51371 936.5087 -944.10477 1330.0998
## 432       192.9975 -569.82871 955.8237 -973.64451 1359.6395
autoplot(sleepy_ses)+autolayer(fitted(sleepy_ses))

accuracy(sleepy_ses,ts_sleepy_test)
##                      ME     RMSE       MAE       MPE     MAPE      MASE
## Training set  0.9920141 131.9283  71.31219 -12.30396 36.58620 0.9911845
## Test set     12.2525021 178.4789 115.24875 -32.49165 56.24902 1.6018689
##                   ACF1 Theil's U
## Training set 0.1845760        NA
## Test set     0.6316829  1.435676
##naive
# Naive benchmark: forecast equals the last observed training value.
# With alpha ~ 1 above, SES is expected to behave almost identically.
sleepy_naive<-naive(ts_sleepy_train,20)
summary(sleepy_naive)
## 
## Forecast method: Naive method
## 
## Model Information:
## Call: naive(y = ts_sleepy_train, h = 20) 
## 
## Residual sd: 133.1053 
## 
## Error measures:
##              ME     RMSE      MAE       MPE     MAPE MASE      ACF1
## Training set  1 132.5135 71.94643 -12.41402 36.91139    1 0.1845285
## 
## Forecasts:
##     Point Forecast      Lo 80    Hi 80      Lo 95     Hi 95
## 413            193   23.17706 362.8229  -66.72177  452.7218
## 414            193  -47.16590 433.1659 -174.30205  560.3021
## 415            193 -101.14196 487.1420 -256.85131  642.8513
## 416            193 -146.64588 532.6459 -326.44355  712.4435
## 417            193 -186.73564 572.7356 -387.75554  773.7555
## 418            193 -222.97955 608.9795 -443.18582  829.1858
## 419            193 -256.30926 642.3093 -494.15922  880.1592
## 420            193 -287.33181 673.3318 -541.60411  927.6041
## 421            193 -316.46882 702.4688 -586.16532  972.1653
## 422            193 -344.02729 730.0273 -628.31236 1014.3124
## 423            193 -370.23897 756.2390 -668.39967 1054.3997
## 424            193 -395.28392 781.2839 -706.70261 1092.7026
## 425            193 -419.30532 805.3053 -743.44017 1129.4402
## 426            193 -442.41926 828.4193 -778.78989 1164.7899
## 427            193 -464.72142 850.7214 -812.89810 1198.8981
## 428            193 -486.29176 872.2918 -845.88709 1231.8871
## 429            193 -507.19792 893.1979 -877.86030 1263.8603
## 430            193 -527.49771 913.4977 -908.90616 1294.9062
## 431            193 -547.24103 933.2410 -939.10096 1325.1010
## 432            193 -566.47127 952.4713 -968.51108 1354.5111
accuracy(sleepy_naive,ts_sleepy_test)
##                 ME     RMSE       MAE       MPE     MAPE     MASE      ACF1
## Training set  1.00 132.5135  71.94643 -12.41402 36.91139 1.000000 0.1845285
## Test set     12.25 178.4787 115.25000 -32.49337 56.25039 1.601886 0.6316829
##              Theil's U
## Training set        NA
## Test set      1.435697
##mean
# Mean benchmark: forecast equals the training-sample average.
sleepy_mean<-meanf(ts_sleepy_train,20)
summary(sleepy_mean)
## 
## Forecast method: Mean
## 
## Model Information:
## $mu
## [1] 223.1504
## 
## $mu.se
## [1] 15.45001
## 
## $sd
## [1] 164.2359
## 
## $bootstrap
## [1] FALSE
## 
## $call
## meanf(y = ts_sleepy_train, h = 20)
## 
## attr(,"class")
## [1] "meanf"
## 
## Error measures:
##                         ME     RMSE     MAE       MPE     MAPE   MASE      ACF1
## Training set -6.536991e-15 163.5075 84.0838 -29.77285 45.65213 1.1687 0.6710025
## 
## Forecasts:
##     Point Forecast    Lo 80    Hi 80     Lo 95    Hi 95
## 413       223.1504 10.49003 435.8109 -103.6985 549.9994
## 414       223.1504 10.49003 435.8109 -103.6985 549.9994
## 415       223.1504 10.49003 435.8109 -103.6985 549.9994
## 416       223.1504 10.49003 435.8109 -103.6985 549.9994
## 417       223.1504 10.49003 435.8109 -103.6985 549.9994
## 418       223.1504 10.49003 435.8109 -103.6985 549.9994
## 419       223.1504 10.49003 435.8109 -103.6985 549.9994
## 420       223.1504 10.49003 435.8109 -103.6985 549.9994
## 421       223.1504 10.49003 435.8109 -103.6985 549.9994
## 422       223.1504 10.49003 435.8109 -103.6985 549.9994
## 423       223.1504 10.49003 435.8109 -103.6985 549.9994
## 424       223.1504 10.49003 435.8109 -103.6985 549.9994
## 425       223.1504 10.49003 435.8109 -103.6985 549.9994
## 426       223.1504 10.49003 435.8109 -103.6985 549.9994
## 427       223.1504 10.49003 435.8109 -103.6985 549.9994
## 428       223.1504 10.49003 435.8109 -103.6985 549.9994
## 429       223.1504 10.49003 435.8109 -103.6985 549.9994
## 430       223.1504 10.49003 435.8109 -103.6985 549.9994
## 431       223.1504 10.49003 435.8109 -103.6985 549.9994
## 432       223.1504 10.49003 435.8109 -103.6985 549.9994
accuracy(sleepy_mean,ts_sleepy_test)
##                         ME     RMSE      MAE       MPE     MAPE    MASE
## Training set -6.536991e-15 163.5075  84.0838 -29.77285 45.65213 1.16870
## Test set     -1.790044e+01 178.9553 130.3252 -53.19147 72.84882 1.81142
##                   ACF1 Theil's U
## Training set 0.6710025        NA
## Test set     0.6316829  1.716916
##ets
# Automatic ETS model selection (picks ETS(M,A,N) here, per the output).
sleepy_ets<-ets(ts_sleepy_train)
checkresiduals(sleepy_ets)

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(M,A,N)
## Q* = 18.773, df = 6, p-value = 0.004564
## 
## Model df: 4.   Total lags used: 10
sleepy_ets_fc<-forecast(sleepy_ets,20)
autoplot(sleepy_ets_fc)

summary(sleepy_ets)
## ETS(M,A,N) 
## 
## Call:
##  ets(y = ts_sleepy_train) 
## 
##   Smoothing parameters:
##     alpha = 0.8978 
##     beta  = 3e-04 
## 
##   Initial states:
##     l = 89.3627 
##     b = 28.4785 
## 
##   sigma:  0.4228
## 
##      AIC     AICc      BIC 
## 1573.877 1574.438 1587.514 
## 
## Training set error measures:
##                     ME     RMSE    MAE       MPE     MAPE     MASE      ACF1
## Training set -30.09652 137.9725 77.615 -31.29353 43.24843 1.078789 0.2457719
# Note the very poor test-set fit below: the additive trend extrapolates far
# away from the held-out data.
accuracy(sleepy_ets_fc,ts_sleepy_test)
##                      ME     RMSE      MAE        MPE      MAPE     MASE
## Training set  -30.09652 137.9725  77.6150  -31.29353  43.24843 1.078789
## Test set     -275.00767 406.2965 368.5761 -253.67636 268.36253 5.122924
##                   ACF1 Theil's U
## Training set 0.2457719        NA
## Test set     0.8070913  6.225621
##arima
# Automatic ARIMA order selection (picks ARIMA(0,0,2) with non-zero mean).
sleepy_arima<-auto.arima(ts_sleepy_train)
checkresiduals(sleepy_arima)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(0,0,2) with non-zero mean
## Q* = 4.5785, df = 7, p-value = 0.7112
## 
## Model df: 3.   Total lags used: 10
sleepy_arima_fc<-forecast(sleepy_arima,20)
autoplot(sleepy_arima_fc)

accuracy(sleepy_arima_fc,ts_sleepy_test)
##                       ME     RMSE       MAE       MPE     MAPE     MASE
## Training set   0.5166868 110.1224  62.60735 -17.90589 36.53934 0.870194
## Test set     -15.2465969 179.3098 131.18022 -51.83645 72.72038 1.823304
##                    ACF1 Theil's U
## Training set 0.03399085        NA
## Test set     0.65584251  1.703929
# Short-horizon (2-step-ahead) evaluation of the ARIMA model.
sleepy_arima_fc2<-forecast(sleepy_arima,2)
ts_sleepy_test2<-window(ts_sleepy_test,end=414)
# BUG FIX: the original line was accuracy(sleepy_arima_fc, ts_sleepy_test[1:2]),
# which (a) ignored the 2-step forecast object created just above and
# (b) subset the ts with [1:2], stripping its time attributes so the two
# observations were misaligned with the forecast index (hence the implausible
# test-set ME of ~343 in the transcript below). Compare the 2-step forecast
# against the matching 2-observation test window instead.
accuracy(sleepy_arima_fc2,ts_sleepy_test2)
# NOTE(review): the echoed table below is the output of the original,
# incorrect call and will change when the chunk is re-knitted.
##                       ME     RMSE       MAE       MPE     MAPE     MASE
## Training set   0.5166868 110.1224  62.60735 -17.90589 36.53934 0.870194
## Test set     343.5949378 429.5196 343.59494  51.93037 51.93037 4.775705
##                    ACF1
## Training set 0.03399085
## Test set             NA
##tbats

# TBATS model (trigonometric seasonality, Box-Cox, ARMA errors, trend) —
# fitted automatically; best test-set MAPE among the Sleepy ts models here.
sleepy_tbats<-tbats(ts_sleepy_train)
sleepy_tbats_fc<-forecast(sleepy_tbats,20)
autoplot(sleepy_tbats_fc)

accuracy(sleepy_tbats_fc,ts_sleepy_test)
##                    ME     RMSE      MAE        MPE     MAPE      MASE      ACF1
## Training set 10.82399 118.8437 60.77630 -12.337497 31.77580 0.8447438 0.3717874
## Test set     69.86837 194.9753 95.34392   6.210551 30.02737 1.3252071 0.6277320
##              Theil's U
## Training set        NA
## Test set      1.198207
##linear regression
# Multiple linear regression benchmark for the Sleepy series:
# regress sold_count on every remaining column of the product table.
autoplot(ts_sleepy)

# Drop the constant product id so it is not picked up as a predictor.
sleepy<-sleepy[,-"product_content_id"]
# Same 300-412 / 413-432 split used by the time-series models above.
train.sleepy<-sleepy[300:412,]
test.sleepy<-sleepy[413:432,]
sleepy_linear<-lm(sold_count~.,data=train.sleepy)
sleepy.predict<-predict(sleepy_linear,test.sleepy)
accuracy(sleepy.predict,test.sleepy$sold_count)
##                 ME     RMSE      MAE       MPE     MAPE
## Test set 0.1126242 46.35828 31.11812 -6.344007 15.87181
# CONSISTENCY FIX: plot as a labelled line, matching the other prediction
# plots in this file (the original bare plot(sleepy.predict) drew unlabelled
# points).
plot(sleepy.predict,type="l",xlab="Days",ylab="Sleepy Wet Towel")

##linear according to correlation
# Reduced model: keep only the predictors that the correlation plot below
# shows as most related to sold_count.
ggcorr(train.sleepy,label=TRUE)
## Warning in ggcorr(train.sleepy, label = TRUE): data in column(s) 'event_date'
## are not numeric and were ignored

sleepy_linear_corr<-lm(sold_count~basket_count+favored_count+visit_count+
                         category_visits,train.sleepy)
sleepy.predict.corr<-predict(sleepy_linear_corr,test.sleepy)
accuracy(sleepy.predict.corr,test.sleepy$sold_count)
##                ME     RMSE      MAE      MPE     MAPE
## Test set 23.00037 53.35012 32.68619 8.195822 14.97151
plot(sleepy.predict.corr,type="l",xlab="Days",ylab="Sleepy Wet Towel")

####La Roche
##Building test and train sets
# Hold out the last 20 observations (413-432) as the test horizon; train on
# 300-412, matching the split used for the other products.
ts_la_roche_train<-window(ts_la_roche,start=300,end=412)
ts_la_roche_test<-window(ts_la_roche,start=413,end=432)

##holt
# FIX: removed a duplicated pair of window() calls that re-created the
# identical train/test sets immediately after the two lines above.
# Holt's linear-trend exponential smoothing, 20-step-ahead forecast.
la_roche_holt<-holt(ts_la_roche_train,20)
summary(la_roche_holt)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = ts_la_roche_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.7254 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 5.4763 
##     b = 0.2199 
## 
##   sigma:  18.5148
## 
##      AIC     AICc      BIC 
## 1199.719 1200.280 1213.356 
## 
## Error measures:
##                       ME     RMSE      MAE       MPE     MAPE     MASE
## Training set -0.01254865 18.18414 13.08172 -14.45196 36.83587 0.993324
##                    ACF1
## Training set 0.04919043
## 
## Forecasts:
##     Point Forecast       Lo 80     Hi 80      Lo 95     Hi 95
## 413       30.06596   6.3383000  53.79362  -6.222365  66.35429
## 414       30.28575   0.9711234  59.60037 -14.547101  75.11860
## 415       30.50553  -3.4911449  64.50221 -21.487900  82.49897
## 416       30.72532  -7.3835088  68.83415 -27.557105  89.00775
## 417       30.94511 -10.8744255  72.76464 -33.012349  94.90256
## 418       31.16489 -14.0628132  76.39260 -38.004915 100.33470
## 419       31.38468 -17.0126273  79.78199 -42.632615 105.40198
## 420       31.60447 -19.7680457  82.97698 -46.963011 110.17195
## 421       31.82425 -22.3610996  86.00961 -51.045093 114.69360
## 422       32.04404 -24.8158906  88.90397 -54.915720 119.00380
## 423       32.26383 -27.1510934  91.67875 -58.603452 123.13111
## 424       32.48361 -29.3815266  94.34875 -62.130954 127.09818
## 425       32.70340 -31.5191831  96.92598 -65.516565 130.92336
## 426       32.92319 -33.5739309  99.42030 -68.775378 134.62175
## 427       33.14297 -35.5540058 101.83995 -71.919990 138.20594
## 428       33.36276 -37.4663660 104.19189 -74.961040 141.68656
## 429       33.58255 -39.3169533 106.48205 -77.907617 145.07271
## 430       33.80233 -41.1108896 108.71556 -80.767554 148.37222
## 431       34.02212 -42.8526270 110.89687 -83.547659 151.59190
## 432       34.24191 -44.5460643 113.02988 -86.253896 154.73771
autoplot(la_roche_holt)

# Out-of-sample evaluation against the held-out 20 observations.
accuracy(la_roche_holt,ts_la_roche_test)
##                       ME     RMSE      MAE        MPE     MAPE     MASE
## Training set -0.01254865 18.18414 13.08172 -14.451960 36.83587 0.993324
## Test set     10.84606646 24.10274 15.91868   8.553503 31.54323 1.208740
##                    ACF1 Theil's U
## Training set 0.04919043        NA
## Test set     0.53323940 0.9744738
checkresiduals(la_roche_holt)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt's method
## Q* = 16.321, df = 6, p-value = 0.01213
## 
## Model df: 4.   Total lags used: 10
##ses
# Simple exponential smoothing (level only, no trend), 20-step horizon.
la_roche_ses<-ses(ts_la_roche_train,20)
summary(la_roche_ses)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = ts_la_roche_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.7279 
## 
##   Initial states:
##     l = 10.8691 
## 
##   sigma:  18.3405
## 
##      AIC     AICc      BIC 
## 1195.636 1195.856 1203.818 
## 
## Error measures:
##                     ME     RMSE      MAE       MPE     MAPE      MASE
## Training set 0.2289621 18.17745 13.04502 -13.97565 36.26303 0.9905371
##                    ACF1
## Training set 0.04766302
## 
## Forecasts:
##     Point Forecast       Lo 80     Hi 80      Lo 95     Hi 95
## 413       29.70209   6.1978253  53.20636  -6.244582  65.64877
## 414       29.70209   0.6303517  58.77383 -14.759298  74.16348
## 415       29.70209  -4.0304088  63.43459 -21.887316  81.29150
## 416       29.70209  -8.1211435  67.52533 -28.143555  87.54774
## 417       29.70209 -11.8107093  71.21489 -33.786259  93.19044
## 418       29.70209 -15.1981107  74.60229 -38.966842  98.37103
## 419       29.70209 -18.3472965  77.75148 -43.783106 103.18729
## 420       29.70209 -21.3024104  80.70659 -48.302563 107.70675
## 421       29.70209 -24.0954432  83.49963 -52.574138 111.97832
## 422       29.70209 -26.7504573  86.15464 -56.634632 116.03882
## 423       29.70209 -29.2860920  88.69028 -60.512550 119.91673
## 424       29.70209 -31.7171345  91.12132 -64.230509 123.63469
## 425       29.70209 -34.0555500  93.45973 -67.806807 127.21099
## 426       29.70209 -36.3111827  95.71537 -71.256499 130.66068
## 427       29.70209 -38.4922477  97.89643 -74.592150 133.99633
## 428       29.70209 -40.6056849 100.00987 -77.824374 137.22856
## 429       29.70209 -42.6574203 102.06160 -80.962232 140.36642
## 430       29.70209 -44.6525619 104.05675 -84.013538 143.41772
## 431       29.70209 -46.5955494 105.99973 -86.985081 146.38926
## 432       29.70209 -48.4902709 107.89445 -89.882807 149.28699
autoplot(la_roche_ses)+autolayer(fitted(la_roche_ses))

accuracy(la_roche_ses,ts_la_roche_test)
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set  0.2289621 18.17745 13.04502 -13.97565 36.26303 0.9905371
## Test set     13.2979082 24.62386 16.21916  16.29590 30.41988 1.2315568
##                    ACF1 Theil's U
## Training set 0.04766302        NA
## Test set     0.49813737 0.9763132
##naive
# Naive benchmark: forecast equals the last observed training value.
la_roche_naive<-naive(ts_la_roche_train,20)
summary(la_roche_naive)
## 
## Forecast method: Naive method
## 
## Model Information:
## Call: naive(y = ts_la_roche_train, h = 20) 
## 
## Residual sd: 18.7636 
## 
## Error measures:
##                      ME     RMSE      MAE       MPE     MAPE MASE      ACF1
## Training set 0.09821429 18.67987 13.16964 -13.36197 36.88527    1 -0.184464
## 
## Forecasts:
##     Point Forecast       Lo 80     Hi 80      Lo 95     Hi 95
## 413             23  -0.9392152  46.93922  -13.61187  59.61187
## 414             23 -10.8551628  56.85516  -28.77700  74.77700
## 415             23 -18.4639370  64.46394  -40.41362  86.41362
## 416             23 -24.8784303  70.87843  -50.22374  96.22374
## 417             23 -30.5297124  76.52971  -58.86663 104.86663
## 418             23 -35.6388620  81.63886  -66.68040 112.68040
## 419             23 -40.3372099  86.33721  -73.86590 119.86590
## 420             23 -44.7103255  90.71033  -80.55401 126.55401
## 421             23 -48.8176455  94.81765  -86.83561 132.83561
## 422             23 -52.7024453  98.70245  -92.77690 138.77690
## 423             23 -56.3973945 102.39739  -98.42784 144.42784
## 424             23 -59.9278739 105.92787 -103.82724 149.82724
## 425             23 -63.3140678 109.31407 -109.00598 155.00598
## 426             23 -66.5723412 112.57234 -113.98907 159.98907
## 427             23 -69.7161816 115.71618 -118.79716 164.79716
## 428             23 -72.7568606 118.75686 -123.44748 169.44748
## 429             23 -75.7039127 121.70391 -127.95461 173.95461
## 430             23 -78.5654883 124.56549 -132.33101 178.33101
## 431             23 -81.3486197 127.34862 -136.58744 182.58744
## 432             23 -84.0594249 130.05942 -140.73326 186.73326
accuracy(la_roche_naive,ts_la_roche_test)
##                       ME     RMSE      MAE       MPE     MAPE     MASE
## Training set  0.09821429 18.67987 13.16964 -13.36197 36.88527 1.000000
## Test set     20.00000000 28.80104 20.70000  35.18321 38.78847 1.571797
##                    ACF1 Theil's U
## Training set -0.1844640        NA
## Test set      0.4981374   1.12516
##mean
# Mean benchmark: forecast equals the training-sample average.
la_roche_mean<-meanf(ts_la_roche_train,20)
summary(la_roche_mean)
## 
## Forecast method: Mean
## 
## Model Information:
## $mu
## [1] 53.36283
## 
## $mu.se
## [1] 2.754495
## 
## $sd
## [1] 29.28068
## 
## $bootstrap
## [1] FALSE
## 
## $call
## meanf(y = ts_la_roche_train, h = 20)
## 
## attr(,"class")
## [1] "meanf"
## 
## Error measures:
##                        ME     RMSE      MAE      MPE     MAPE     MASE
## Training set 2.762877e-15 29.15083 22.80852 -95.2591 121.4718 1.731901
##                   ACF1
## Training set 0.7827953
## 
## Forecasts:
##     Point Forecast    Lo 80    Hi 80     Lo 95    Hi 95
## 413       53.36283 15.44881 91.27685 -4.909225 111.6349
## 414       53.36283 15.44881 91.27685 -4.909225 111.6349
## 415       53.36283 15.44881 91.27685 -4.909225 111.6349
## 416       53.36283 15.44881 91.27685 -4.909225 111.6349
## 417       53.36283 15.44881 91.27685 -4.909225 111.6349
## 418       53.36283 15.44881 91.27685 -4.909225 111.6349
## 419       53.36283 15.44881 91.27685 -4.909225 111.6349
## 420       53.36283 15.44881 91.27685 -4.909225 111.6349
## 421       53.36283 15.44881 91.27685 -4.909225 111.6349
## 422       53.36283 15.44881 91.27685 -4.909225 111.6349
## 423       53.36283 15.44881 91.27685 -4.909225 111.6349
## 424       53.36283 15.44881 91.27685 -4.909225 111.6349
## 425       53.36283 15.44881 91.27685 -4.909225 111.6349
## 426       53.36283 15.44881 91.27685 -4.909225 111.6349
## 427       53.36283 15.44881 91.27685 -4.909225 111.6349
## 428       53.36283 15.44881 91.27685 -4.909225 111.6349
## 429       53.36283 15.44881 91.27685 -4.909225 111.6349
## 430       53.36283 15.44881 91.27685 -4.909225 111.6349
## 431       53.36283 15.44881 91.27685 -4.909225 111.6349
## 432       53.36283 15.44881 91.27685 -4.909225 111.6349
accuracy(la_roche_mean,ts_la_roche_test)
##                         ME     RMSE      MAE       MPE      MAPE     MASE
## Training set  2.762877e-15 29.15083 22.80852 -95.25910 121.47178 1.731901
## Test set     -1.036283e+01 23.17085 20.68142 -50.38294  62.72762 1.570385
##                   ACF1 Theil's U
## Training set 0.7827953        NA
## Test set     0.4981374  1.036926
##ets
# Automatic ETS selection (picks ETS(A,N,N), i.e. essentially SES — note the
# near-identical alpha and error measures to the ses() fit above).
la_roche_ets<-ets(ts_la_roche_train)
checkresiduals(la_roche_ets)

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(A,N,N)
## Q* = 16.18, df = 8, p-value = 0.03987
## 
## Model df: 2.   Total lags used: 10
la_roche_ets_fc<-forecast(la_roche_ets,20)
autoplot(la_roche_ets_fc)

summary(la_roche_ets)
## ETS(A,N,N) 
## 
## Call:
##  ets(y = ts_la_roche_train) 
## 
##   Smoothing parameters:
##     alpha = 0.7278 
## 
##   Initial states:
##     l = 10.8187 
## 
##   sigma:  18.3405
## 
##      AIC     AICc      BIC 
## 1195.636 1195.856 1203.818 
## 
## Training set error measures:
##                     ME     RMSE      MAE       MPE     MAPE      MASE     ACF1
## Training set 0.2296552 18.17745 13.04539 -13.97018 36.26489 0.9905649 0.047763
accuracy(la_roche_ets_fc,ts_la_roche_test)
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set  0.2296552 18.17745 13.04539 -13.97018 36.26489 0.9905649
## Test set     13.2947346 24.62214 16.21789  16.28696 30.41886 1.2314604
##                   ACF1 Theil's U
## Training set 0.0477630        NA
## Test set     0.4981374  0.976256
##arima
# Automatic ARIMA order selection (picks ARIMA(1,1,1)); the Ljung-Box
# p-value below suggests residuals are close to white noise.
la_roche_arima<-auto.arima(ts_la_roche_train)
checkresiduals(la_roche_arima)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(1,1,1)
## Q* = 6.6679, df = 8, p-value = 0.5729
## 
## Model df: 2.   Total lags used: 10
la_roche_arima_fc<-forecast(la_roche_arima,20)
autoplot(la_roche_arima_fc)

accuracy(la_roche_arima_fc,ts_la_roche_test)
##                     ME     RMSE      MAE          MPE     MAPE      MASE
## Training set 0.6724592 17.42081 12.75499 -13.58058522 34.78088 0.9685142
## Test set     7.5016807 22.42854 15.19963  -0.07433621 32.16189 1.1541414
##                    ACF1 Theil's U
## Training set 0.01387811        NA
## Test set     0.54730368 0.9381456
##tbats

# TBATS model, fitted automatically, evaluated on the same 20-step horizon.
la_roche_tbats<-tbats(ts_la_roche_train)
la_roche_tbats_fc<-forecast(la_roche_tbats,20)
autoplot(la_roche_tbats_fc)

accuracy(la_roche_tbats_fc,ts_la_roche_test)
##                      ME     RMSE      MAE        MPE     MAPE     MASE
## Training set  0.8680211 18.33629 13.18135 -14.121367 36.36587 1.000889
## Test set     10.4943039 23.22995 15.29886   8.395002 30.15934 1.161676
##                   ACF1 Theil's U
## Training set 0.2007237        NA
## Test set     0.4981374 0.9315743
##linear regression
# Multiple linear regression benchmark for the La Roche series:
# regress sold_count on every remaining column of the product table.
autoplot(ts_la_roche)

# Drop the constant product id so it is not picked up as a predictor.
la_roche<-la_roche[,-"product_content_id"]
# Same 300-412 / 413-432 split used by the time-series models above.
train.la_roche<-la_roche[300:412,]
test.la_roche<-la_roche[413:432,]
la_roche_linear<-lm(sold_count~.,data=train.la_roche)
la_roche.predict<-predict(la_roche_linear,test.la_roche)
accuracy(la_roche.predict,test.la_roche$sold_count)
##                 ME     RMSE      MAE       MPE     MAPE
## Test set -7.349925 12.02528 9.846595 -26.78751 29.66923
# CONSISTENCY FIX: plot as a labelled line, matching the other prediction
# plots in this file (the original bare plot(la_roche.predict) drew
# unlabelled points).
plot(la_roche.predict,type="l",xlab="Days",ylab="La Roche Cleanser")

##linear according to correlation
# Reduced model: keep only the predictors that the correlation plot below
# shows as most related to sold_count.
ggcorr(train.la_roche,label=TRUE)
## Warning in ggcorr(train.la_roche, label = TRUE): data in column(s) 'event_date'
## are not numeric and were ignored

la_roche_linear_corr<-lm(sold_count~basket_count+favored_count+visit_count+
                           category_visits+ty_visits,train.la_roche)
la_roche.predict.corr<-predict(la_roche_linear_corr,test.la_roche)
accuracy(la_roche.predict.corr,test.la_roche$sold_count)
##                ME     RMSE      MAE       MPE     MAPE
## Test set 3.409211 10.63975 7.755503 0.4453327 18.63708
plot(la_roche.predict.corr,type="l",xlab="Days",ylab="La Roche Cleanser")

####Trendyol Tayt
##Building test and train sets
ts_trend_tayt_train<-window(ts_trend_tayt,start=300,end=412)
ts_trend_tayt_test<-window(ts_trend_tayt,start=413,end=432)

##holt
trend_tayt_holt<-holt(ts_trend_tayt_train,20)
summary(trend_tayt_holt)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = ts_trend_tayt_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.5918 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 879.0846 
##     b = -2.0397 
## 
##   sigma:  305.3758
## 
##      AIC     AICc      BIC 
## 1833.191 1833.752 1846.828 
## 
## Error measures:
##                     ME     RMSE      MAE  MPE MAPE     MASE      ACF1
## Training set 0.1447713 299.9222 178.4934 -Inf  Inf 1.005951 0.1121222
## 
## Forecasts:
##     Point Forecast       Lo 80    Hi 80       Lo 95    Hi 95
## 413       654.0773  262.722477 1045.432    55.55173 1252.603
## 414       652.0393  197.266730 1106.812   -43.47532 1347.554
## 415       650.0012  139.613480 1160.389  -130.56948 1430.572
## 416       647.9632   87.435416 1208.491  -209.29006 1505.216
## 417       645.9251   39.373210 1252.477  -281.71599 1573.566
## 418       643.8871   -5.448913 1293.223  -349.18663 1636.961
## 419       641.8490  -47.634382 1331.332  -412.62486 1696.323
## 420       639.8110  -87.619889 1367.242  -472.69854 1752.321
## 421       637.7729 -125.733528 1401.279  -529.90944 1805.455
## 422       635.7349 -162.229223 1433.699  -584.64591 1856.116
## 423       633.6968 -197.308263 1464.702  -637.21580 1904.609
## 424       631.6588 -231.133420 1494.451  -687.86803 1951.186
## 425       629.6207 -263.838535 1523.080  -736.80731 1996.049
## 426       627.5827 -295.535245 1550.701  -784.20437 2039.370
## 427       625.5447 -326.317817 1577.407  -830.20337 2081.293
## 428       623.5066 -356.266713 1603.280  -874.92738 2121.941
## 429       621.4686 -385.451262 1628.388  -918.48242 2161.420
## 430       619.4305 -413.931707 1652.793  -960.96062 2199.822
## 431       617.3925 -441.760785 1676.546 -1002.44264 2237.228
## 432       615.3544 -468.984977 1699.694 -1042.99957 2273.708
autoplot(trend_tayt_holt)

accuracy(trend_tayt_holt,ts_trend_tayt_test)
##                        ME     RMSE      MAE       MPE     MAPE     MASE
## Training set    0.1447713 299.9222 178.4934      -Inf      Inf 1.005951
## Test set     -187.5658707 243.7958 229.1578 -56.63483 61.86349 1.291484
##                   ACF1 Theil's U
## Training set 0.1121222        NA
## Test set     0.7571179  3.977911
checkresiduals(trend_tayt_holt)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt's method
## Q* = 22.455, df = 6, p-value = 0.001001
## 
## Model df: 4.   Total lags used: 10
##ses
trend_tayt_ses<-ses(ts_trend_tayt_train,20)
summary(trend_tayt_ses)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = ts_trend_tayt_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.5942 
## 
##   Initial states:
##     l = 897.8195 
## 
##   sigma:  302.6001
## 
##      AIC     AICc      BIC 
## 1829.182 1829.402 1837.364 
## 
## Error measures:
##                     ME     RMSE      MAE  MPE MAPE     MASE      ACF1
## Training set -3.575563 299.9103 178.9255 -Inf  Inf 1.008386 0.1107847
## 
## Forecasts:
##     Point Forecast      Lo 80    Hi 80     Lo 95    Hi 95
## 413       657.7487  269.95099 1045.546   64.6633 1250.834
## 414       657.7487  206.66021 1108.837  -32.1316 1347.629
## 415       657.7487  151.21678 1164.281 -116.9250 1432.422
## 416       657.7487  101.27017 1214.227 -193.3117 1508.809
## 417       657.7487   55.45134 1260.046 -263.3856 1578.883
## 418       657.7487   12.87982 1302.618 -328.4931 1643.990
## 419       657.7487  -27.05027 1342.548 -389.5609 1705.058
## 420       657.7487  -64.77701 1380.274 -447.2590 1762.756
## 421       657.7487 -100.62928 1416.127 -502.0903 1817.588
## 422       657.7487 -134.86149 1450.359 -554.4440 1869.941
## 423       657.7487 -167.67524 1483.173 -604.6283 1920.126
## 424       657.7487 -199.23347 1514.731 -652.8924 1968.390
## 425       657.7487 -229.67014 1545.167 -699.4413 2014.939
## 426       657.7487 -259.09696 1574.594 -744.4457 2059.943
## 427       657.7487 -287.60823 1603.106 -788.0499 2103.547
## 428       657.7487 -315.28443 1630.782 -830.3770 2145.874
## 429       657.7487 -342.19492 1657.692 -871.5331 2187.030
## 430       657.7487 -368.39992 1683.897 -911.6102 2227.108
## 431       657.7487 -393.95219 1709.450 -950.6890 2266.186
## 432       657.7487 -418.89819 1734.396 -988.8406 2304.338
autoplot(trend_tayt_ses)+autolayer(fitted(trend_tayt_ses))

accuracy(trend_tayt_ses,ts_trend_tayt_test)
##                       ME     RMSE      MAE       MPE    MAPE     MASE      ACF1
## Training set   -3.575563 299.9103 178.9255      -Inf     Inf 1.008386 0.1107847
## Test set     -210.598667 267.2649 249.4992 -62.99381 67.8606 1.406124 0.7664920
##              Theil's U
## Training set        NA
## Test set      4.398075
##naive
# Naive benchmark for the tights series: repeat the last training observation
# (672) as the point forecast for all 20 hold-out steps.
trend_tayt_naive<-naive(ts_trend_tayt_train,20)
summary(trend_tayt_naive)
## 
## Forecast method: Naive method
## 
## Model Information:
## Call: naive(y = ts_trend_tayt_train, h = 20) 
## 
## Residual sd: 313.4998 
## 
## Error measures:
##                     ME     RMSE      MAE  MPE MAPE MASE     ACF1
## Training set -2.991071 312.1115 177.4375 -Inf  Inf    1 -0.20393
## 
## Forecasts:
##     Point Forecast       Lo 80    Hi 80       Lo 95    Hi 95
## 413            672   272.01307 1071.987    60.27278 1283.727
## 414            672   106.33306 1237.667  -193.11293 1537.113
## 415            672   -20.79768 1364.798  -387.54262 1731.543
## 416            672  -127.97386 1471.974  -551.45444 1895.454
## 417            672  -222.39796 1566.398  -695.86364 2039.864
## 418            672  -307.76388 1651.764  -826.41955 2170.420
## 419            672  -386.26594 1730.266  -946.47809 2290.478
## 420            672  -459.33388 1803.334 -1058.22586 2402.226
## 421            672  -527.96078 1871.961 -1163.18165 2507.182
## 422            672  -592.86973 1936.870 -1262.45131 2606.451
## 423            672  -654.60656 1998.607 -1356.86966 2700.870
## 424            672  -713.59536 2057.595 -1447.08524 2791.085
## 425            672  -770.17338 2114.173 -1533.61385 2877.614
## 426            672  -824.61404 2168.614 -1616.87366 2960.874
## 427            672  -877.14271 2221.143 -1697.20933 3041.209
## 428            672  -927.94771 2271.948 -1774.90887 3118.909
## 429            672  -977.18835 2321.188 -1850.21593 3194.216
## 430            672 -1025.00082 2369.001 -1923.33878 3267.339
## 431            672 -1071.50260 2415.503 -1994.45712 3338.457
## 432            672 -1116.79592 2460.796 -2063.72729 3407.727
# Hold-out accuracy; naive is the MASE baseline (training MASE = 1 by definition).
accuracy(trend_tayt_naive,ts_trend_tayt_test)
##                       ME     RMSE      MAE       MPE     MAPE     MASE
## Training set   -2.991071 312.1115 177.4375      -Inf      Inf 1.000000
## Test set     -224.850000 278.6328 258.0500 -66.52537 70.63093 1.454315
##                   ACF1 Theil's U
## Training set -0.203930        NA
## Test set      0.766492  4.596106
##mean
# Mean benchmark: forecast the training-set average (648.08) for every step.
trend_tayt_mean<-meanf(ts_trend_tayt_train,20)
summary(trend_tayt_mean)
## 
## Forecast method: Mean
## 
## Model Information:
## $mu
## [1] 648.0796
## 
## $mu.se
## [1] 31.08111
## 
## $sd
## [1] 330.3968
## 
## $bootstrap
## [1] FALSE
## 
## $call
## meanf(y = ts_trend_tayt_train, h = 20)
## 
## attr(,"class")
## [1] "meanf"
## 
## Error measures:
##                         ME     RMSE      MAE  MPE MAPE     MASE      ACF1
## Training set -1.810072e-14 328.9316 257.4354 -Inf  Inf 1.450851 0.5485202
## 
## Forecasts:
##     Point Forecast    Lo 80    Hi 80     Lo 95    Hi 95
## 413       648.0796 220.2662 1075.893 -9.449438 1305.609
## 414       648.0796 220.2662 1075.893 -9.449438 1305.609
## 415       648.0796 220.2662 1075.893 -9.449438 1305.609
## 416       648.0796 220.2662 1075.893 -9.449438 1305.609
## 417       648.0796 220.2662 1075.893 -9.449438 1305.609
## 418       648.0796 220.2662 1075.893 -9.449438 1305.609
## 419       648.0796 220.2662 1075.893 -9.449438 1305.609
## 420       648.0796 220.2662 1075.893 -9.449438 1305.609
## 421       648.0796 220.2662 1075.893 -9.449438 1305.609
## 422       648.0796 220.2662 1075.893 -9.449438 1305.609
## 423       648.0796 220.2662 1075.893 -9.449438 1305.609
## 424       648.0796 220.2662 1075.893 -9.449438 1305.609
## 425       648.0796 220.2662 1075.893 -9.449438 1305.609
## 426       648.0796 220.2662 1075.893 -9.449438 1305.609
## 427       648.0796 220.2662 1075.893 -9.449438 1305.609
## 428       648.0796 220.2662 1075.893 -9.449438 1305.609
## 429       648.0796 220.2662 1075.893 -9.449438 1305.609
## 430       648.0796 220.2662 1075.893 -9.449438 1305.609
## 431       648.0796 220.2662 1075.893 -9.449438 1305.609
## 432       648.0796 220.2662 1075.893 -9.449438 1305.609
# Hold-out accuracy of the mean benchmark; best test RMSE among the simple
# benchmarks for this series (259.71 vs 267-279 for ses/naive).
accuracy(trend_tayt_mean,ts_trend_tayt_test)
##                         ME     RMSE      MAE       MPE     MAPE     MASE
## Training set -1.810072e-14 328.9316 257.4354      -Inf      Inf 1.450851
## Test set     -2.009296e+02 259.7142 243.6978 -60.59777 65.98103 1.373429
##                   ACF1 Theil's U
## Training set 0.5485202        NA
## Test set     0.7664920  4.264387
##ets
# Automatic exponential-smoothing state-space model; selected ETS(A,N,N),
# i.e. simple exponential smoothing with additive errors.
trend_tayt_ets<-ets(ts_trend_tayt_train)
# Ljung-Box p = 0.0042 (< 0.05): residuals are still autocorrelated, so the
# ETS model has not captured all the signal in this series.
checkresiduals(trend_tayt_ets)

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(A,N,N)
## Q* = 22.416, df = 8, p-value = 0.0042
## 
## Model df: 2.   Total lags used: 10
# 20-step-ahead forecast from the fitted ETS model.
trend_tayt_ets_fc<-forecast(trend_tayt_ets,20)
autoplot(trend_tayt_ets_fc)

summary(trend_tayt_ets)
## ETS(A,N,N) 
## 
## Call:
##  ets(y = ts_trend_tayt_train) 
## 
##   Smoothing parameters:
##     alpha = 0.594 
## 
##   Initial states:
##     l = 897.8354 
## 
##   sigma:  302.6001
## 
##      AIC     AICc      BIC 
## 1829.182 1829.402 1837.364 
## 
## Training set error measures:
##                     ME     RMSE      MAE  MPE MAPE     MASE      ACF1
## Training set -3.577172 299.9103 178.9278 -Inf  Inf 1.008399 0.1109197
# Hold-out accuracy; essentially identical to the hand-fitted ses() above,
# as expected since ETS(A,N,N) is the same model family.
accuracy(trend_tayt_ets_fc,ts_trend_tayt_test)
##                       ME     RMSE      MAE       MPE     MAPE     MASE
## Training set   -3.577172 299.9103 178.9278      -Inf      Inf 1.008399
## Test set     -210.581459 267.2513 249.4889 -62.98955 67.85725 1.406066
##                   ACF1 Theil's U
## Training set 0.1109197        NA
## Test set     0.7664920  4.397837
##arima
# Automatic ARIMA selection; chose ARIMA(1,0,0) with non-zero mean.
trend_tayt_arima<-auto.arima(ts_trend_tayt_train)
# Ljung-Box p = 0.34 (> 0.05): no significant residual autocorrelation,
# unlike the ETS fit above.
checkresiduals(trend_tayt_arima)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(1,0,0) with non-zero mean
## Q* = 8.983, df = 8, p-value = 0.3437
## 
## Model df: 2.   Total lags used: 10
trend_tayt_arima_fc<-forecast(trend_tayt_arima,20)
autoplot(trend_tayt_arima_fc)

accuracy(trend_tayt_arima_fc,ts_trend_tayt_test)
##                       ME     RMSE      MAE       MPE     MAPE     MASE
## Training set   -2.245072 274.3966 179.1991      -Inf      Inf 1.009928
## Test set     -206.193051 262.5491 245.1377 -61.77595 66.64837 1.381544
##                    ACF1 Theil's U
## Training set 0.01181821        NA
## Test set     0.76522197  4.320004
##tbats
# TBATS (trigonometric seasonality, Box-Cox, ARMA errors, trend) as a more
# flexible alternative; here it scores worst on the hold-out (RMSE 287.9).
trend_tayt_tbats<-tbats(ts_trend_tayt_train)
trend_tayt_tbats_fc<-forecast(trend_tayt_tbats,20)
autoplot(trend_tayt_tbats_fc)

accuracy(trend_tayt_tbats_fc,ts_trend_tayt_test)
##                      ME     RMSE      MAE       MPE     MAPE     MASE
## Training set  -10.43774 273.1901 182.9158      -Inf      Inf 1.030875
## Test set     -234.95370 287.9357 265.7275 -69.14369 72.92566 1.497584
##                     ACF1 Theil's U
## Training set -0.03888996        NA
## Test set      0.76708561  4.752005
##linear regression
autoplot(ts_trend_tayt)

# Drop the constant id column before regressing on the remaining columns.
# NOTE(review): this reassigns `trend_tayt` destructively — re-running this
# line after the column is gone would error (data.table); confirm the script
# is only ever run top-to-bottom from a fresh session.
trend_tayt<-trend_tayt[,-"product_content_id"]
# Row-index split matching the ts windows above: rows 300:412 train, 413:432 test.
train.trend_tayt<-trend_tayt[300:412,]
test.trend_tayt<-trend_tayt[413:432,]
# Full regression of sold_count on all remaining columns (including event_date).
trend_tayt_linear<-lm(sold_count~.,data=train.trend_tayt)
trend_tayt.predict<-predict(trend_tayt_linear,test.trend_tayt)
accuracy(trend_tayt.predict,test.trend_tayt$sold_count)
##                ME    RMSE      MAE     MPE     MAPE
## Test set 98.02491 150.645 133.8769 27.7358 31.90949
plot(trend_tayt.predict)

##linear according to correlation
# Correlation heatmap used to pick predictors; event_date is non-numeric and
# is skipped (see warning below).
ggcorr(train.trend_tayt,label=TRUE)
## Warning in ggcorr(train.trend_tayt, label = TRUE): data in column(s)
## 'event_date' are not numeric and were ignored
# Reduced model using only the most correlated predictors; roughly halves
# the test RMSE relative to the full model (114.0 vs 150.6).
trend_tayt_linear_corr<-lm(sold_count~basket_count+favored_count+visit_count
                           ,train.trend_tayt)
trend_tayt.predict.corr<-predict(trend_tayt_linear_corr,test.trend_tayt)
accuracy(trend_tayt.predict.corr,test.trend_tayt$sold_count)
##                ME     RMSE     MAE      MPE     MAPE
## Test set 50.59298 114.0405 85.6518 13.50563 17.98632
plot(trend_tayt.predict.corr)

plot(trend_tayt.predict.corr,type="l",xlab="Days",ylab="Trendyol Tights")

####Oralb
##Building test and train sets
# Train on observations 280-378, hold out the final 20 points (379-398).
ts_oralb_train<-window(ts_oralb,start=280,end=378)
ts_oralb_test<-window(ts_oralb,start=379,end=398)

##holt
# Holt's linear-trend exponential smoothing, 20 steps ahead.
# beta = 1e-04 (see summary below), so the fitted trend is nearly flat.
oralb_holt<-holt(ts_oralb_train,20)
summary(oralb_holt)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = ts_oralb_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.5822 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 256.6825 
##     b = 0.2307 
## 
##   sigma:  100.0699
## 
##      AIC     AICc      BIC 
## 1372.796 1373.441 1385.772 
## 
## Error measures:
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set -0.4782952 98.02748 56.13349 -50.88779 75.79857 0.9688415
##                    ACF1
## Training set 0.02147262
## 
## Forecasts:
##     Point Forecast      Lo 80    Hi 80       Lo 95    Hi 95
## 379       250.1327 121.887920 378.3775   53.999232 446.2662
## 380       250.3586 101.954988 398.7623   23.394846 477.3224
## 381       250.5846  84.444635 416.7245   -3.504524 504.6736
## 382       250.8105  68.647856 432.9731  -27.783208 529.4042
## 383       251.0364  54.145861 447.9269  -50.081692 552.1545
## 384       251.2623  40.666822 461.8578  -70.815700 573.3403
## 385       251.4882  28.022448 474.9540  -90.273197 593.2497
## 386       251.7142  16.075933 487.3524 -108.663411 612.0917
## 387       251.9401   4.724172 499.1560 -126.144028 630.0242
## 388       252.1660  -6.112831 510.4448 -142.837390 647.1694
## 389       252.3919 -16.498619 521.2825 -158.840678 663.6245
## 390       252.6179 -26.484665 531.7204 -174.232613 679.4683
## 391       252.8438 -36.113353 541.8009 -189.078017 694.7656
## 392       253.0697 -45.420082 551.5595 -203.431026 709.5704
## 393       253.2956 -54.434772 561.0260 -217.337401 723.9286
## 394       253.5215 -63.182988 570.2261 -230.836240 737.8793
## 395       253.7475 -71.686777 579.1817 -243.961259 751.4562
## 396       253.9734 -79.965309 587.9121 -256.741776 764.6886
## 397       254.1993 -88.035377 596.4340 -269.203476 777.6021
## 398       254.4252 -95.911791 604.7623 -281.369009 790.2195
autoplot(oralb_holt)

# Large test error (MAPE 262%): the hold-out level is far below the forecast.
accuracy(oralb_holt,ts_oralb_test)
##                        ME      RMSE       MAE        MPE      MAPE      MASE
## Training set   -0.4782952  98.02748  56.13349  -50.88779  75.79857 0.9688415
## Test set     -168.7289690 171.55697 168.72897 -261.76169 261.76169 2.9121943
##                    ACF1 Theil's U
## Training set 0.02147262        NA
## Test set     0.43052069  6.056433
# Residual diagnostics: Ljung-Box p = 0.079, no significant autocorrelation at 5%.
checkresiduals(oralb_holt)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt's method
## Q* = 11.313, df = 6, p-value = 0.07916
## 
## Model df: 4.   Total lags used: 10
##ses
# Simple exponential smoothing (level only), 20 steps ahead.
oralb_ses<-ses(ts_oralb_train,20)
summary(oralb_ses)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = ts_oralb_train, h = 20) 
## 
##   Smoothing parameters:
##     alpha = 0.5891 
## 
##   Initial states:
##     l = 184.0352 
## 
##   sigma:  98.6958
## 
##      AIC     AICc      BIC 
## 1368.121 1368.373 1375.906 
## 
## Error measures:
##                   ME     RMSE      MAE      MPE     MAPE      MASE       ACF1
## Training set 1.11778 97.69381 55.41254 -48.7249 74.04941 0.9563983 0.02667279
## 
## Forecasts:
##     Point Forecast      Lo 80    Hi 80       Lo 95    Hi 95
## 379       249.2294 122.745598 375.7132   55.789125 442.6696
## 380       249.2294 102.427274 396.0315   24.714930 473.7438
## 381       249.2294  84.597763 413.8610   -2.552955 501.0117
## 382       249.2294  68.518892 429.9399  -27.143466 525.6022
## 383       249.2294  53.758175 444.7006  -49.718033 548.1768
## 384       249.2294  40.036400 458.4224  -70.703679 569.1625
## 385       249.2294  27.160890 471.2979  -90.395071 588.8538
## 386       249.2294  14.992053 483.4667 -109.005702 607.4645
## 387       249.2294   3.424911 495.0339 -126.696118 625.1549
## 388       249.2294  -7.621838 506.0806 -143.590663 642.0494
## 389       249.2294 -18.212687 516.6715 -159.787969 658.2467
## 390       249.2294 -28.399816 526.8586 -175.367839 673.8266
## 391       249.2294 -38.226150 536.6849 -190.395920 688.8547
## 392       249.2294 -47.727508 546.1863 -204.926992 703.3858
## 393       249.2294 -56.934146 555.3929 -219.007330 717.4661
## 394       249.2294 -65.871899 564.3307 -232.676443 731.1352
## 395       249.2294 -74.563034 573.0218 -245.968387 744.4272
## 396       249.2294 -83.026904 581.4857 -258.912760 757.3715
## 397       249.2294 -91.280458 589.7392 -271.535481 769.9943
## 398       249.2294 -99.338635 597.7974 -283.859400 782.3182
# Forecast plot with fitted values overlaid.
autoplot(oralb_ses)+autolayer(fitted(oralb_ses))

# Hold-out accuracy: marginally better than Holt, still badly biased (ME -165.7).
accuracy(oralb_ses,ts_oralb_test)
##                      ME      RMSE       MAE       MPE      MAPE      MASE
## Training set    1.11778  97.69381  55.41254  -48.7249  74.04941 0.9563983
## Test set     -165.67939 168.42537 165.67939 -256.8314 256.83142 2.8595597
##                    ACF1 Theil's U
## Training set 0.02667279        NA
## Test set     0.40785154  5.933022
##naive
# Naive benchmark: last training observation (225) repeated for 20 steps.
oralb_naive<-naive(ts_oralb_train,20)
summary(oralb_naive)
## 
## Forecast method: Naive method
## 
## Model Information:
## Call: naive(y = ts_oralb_train, h = 20) 
## 
## Residual sd: 105.3442 
## 
## Error measures:
##                   ME     RMSE      MAE       MPE     MAPE MASE       ACF1
## Training set 1.22449 104.8125 57.93878 -28.63994 59.04047    1 -0.3212452
## 
## Forecasts:
##     Point Forecast       Lo 80    Hi 80      Lo 95     Hi 95
## 379            225   90.677420 359.3226   19.57134  430.4287
## 380            225   35.039186 414.9608  -65.52000  515.5200
## 381            225   -7.653533 457.6535 -130.81287  580.8129
## 382            225  -43.645160 493.6452 -185.85732  635.8573
## 383            225  -75.354420 525.3544 -234.35245  684.3524
## 384            225 -104.021782 554.0218 -278.19539  728.1954
## 385            225 -130.384142 580.3841 -318.51314  768.5131
## 386            225 -154.921629 604.9216 -356.03999  806.0400
## 387            225 -177.967740 627.9677 -391.28598  841.2860
## 388            225 -199.765294 649.7653 -424.62246  874.6225
## 389            225 -220.497599 670.4976 -456.32978  906.3298
## 390            225 -240.307067 690.3071 -486.62575  936.6257
## 391            225 -259.306950 709.3069 -515.68356  965.6836
## 392            225 -277.589074 727.5891 -543.64366  993.6437
## 393            225 -295.229116 745.2291 -570.62177 1020.6218
## 394            225 -312.290320 762.2903 -596.71464 1046.7146
## 395            225 -328.826186 778.8262 -622.00406 1072.0041
## 396            225 -344.882443 794.8824 -646.55999 1096.5600
## 397            225 -360.498552 810.4986 -670.44276 1120.4428
## 398            225 -375.708840 825.7088 -693.70489 1143.7049
# Hold-out accuracy; naive beats both Holt and SES on this series (RMSE 144.7).
accuracy(oralb_naive,ts_oralb_test)
##                      ME     RMSE       MAE        MPE      MAPE    MASE
## Training set    1.22449 104.8125  57.93878  -28.63994  59.04047 1.00000
## Test set     -141.45000 144.6567 141.45000 -222.14126 222.14126 2.44137
##                    ACF1 Theil's U
## Training set -0.3212452        NA
## Test set      0.4078515  5.159023
##mean
# Mean benchmark: training-set average (146.47) as a constant forecast.
oralb_mean<-meanf(ts_oralb_train,20)
summary(oralb_mean)
## 
## Forecast method: Mean
## 
## Model Information:
## $mu
## [1] 146.4747
## 
## $mu.se
## [1] 11.90618
## 
## $sd
## [1] 118.465
## 
## $bootstrap
## [1] FALSE
## 
## $call
## meanf(y = ts_oralb_train, h = 20)
## 
## attr(,"class")
## [1] "meanf"
## 
## Error measures:
##                         ME     RMSE      MAE       MPE     MAPE   MASE
## Training set -1.235053e-14 117.8652 81.38659 -262.7059 288.7156 1.4047
##                   ACF1
## Training set 0.6057377
## 
## Forecasts:
##     Point Forecast     Lo 80    Hi 80     Lo 95    Hi 95
## 379       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 380       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 381       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 382       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 383       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 384       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 385       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 386       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 387       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 388       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 389       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 390       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 391       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 392       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 393       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 394       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 395       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 396       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 397       146.4747 -7.144789 300.0943 -89.79961 382.7491
## 398       146.4747 -7.144789 300.0943 -89.79961 382.7491
# Hold-out accuracy: the mean benchmark wins on this series by a wide margin
# (test RMSE 69.8 vs 144.7+ for the other models).
accuracy(oralb_mean,ts_oralb_test)
##                         ME      RMSE      MAE       MPE     MAPE     MASE
## Training set -1.235053e-14 117.86522 81.38659 -262.7059 288.7156 1.404700
## Test set     -6.292475e+01  69.83532 62.92475 -109.7136 109.7136 1.086056
##                   ACF1 Theil's U
## Training set 0.6057377        NA
## Test set     0.4078515  2.725752
##ets
# Automatic ETS selection; again picks ETS(A,N,N) (simple exponential smoothing).
oralb_ets<-ets(ts_oralb_train)
# Ljung-Box p = 0.137: residuals show no significant autocorrelation.
checkresiduals(oralb_ets)

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(A,N,N)
## Q* = 12.327, df = 8, p-value = 0.1372
## 
## Model df: 2.   Total lags used: 10
oralb_ets_fc<-forecast(oralb_ets,20)
autoplot(oralb_ets_fc)

summary(oralb_ets)
## ETS(A,N,N) 
## 
## Call:
##  ets(y = ts_oralb_train) 
## 
##   Smoothing parameters:
##     alpha = 0.5892 
## 
##   Initial states:
##     l = 183.8392 
## 
##   sigma:  98.6958
## 
##      AIC     AICc      BIC 
## 1368.121 1368.373 1375.906 
## 
## Training set error measures:
##                   ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 1.12102 97.69381 55.41076 -48.72023 74.04525 0.9563674 0.02667636
# Hold-out accuracy; matches ses() above since it is the same model family.
accuracy(oralb_ets_fc,ts_oralb_test)
##                      ME      RMSE       MAE        MPE      MAPE      MASE
## Training set    1.12102  97.69381  55.41076  -48.72023  74.04525 0.9563674
## Test set     -165.67658 168.42261 165.67658 -256.82740 256.82740 2.8595112
##                    ACF1 Theil's U
## Training set 0.02667636        NA
## Test set     0.40785154  5.932932
##arima
# Automatic ARIMA; selects ARIMA(2,1,1), residuals clean (p = 0.46).
oralb_arima<-auto.arima(ts_oralb_train)
checkresiduals(oralb_arima)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(2,1,1)
## Q* = 6.7455, df = 7, p-value = 0.4558
## 
## Model df: 3.   Total lags used: 10
oralb_arima_fc<-forecast(oralb_arima,20)
autoplot(oralb_arima_fc)

# ARIMA is the best non-benchmark model here (test RMSE 98.3).
accuracy(oralb_arima_fc,ts_oralb_test)
##                      ME     RMSE      MAE        MPE      MAPE      MASE
## Training set   7.112649 92.90530 51.96090  -75.04919  97.53398 0.8968243
## Test set     -94.160322 98.32862 94.16032 -152.07644 152.07644 1.6251693
##                    ACF1 Theil's U
## Training set 0.01587862        NA
## Test set     0.22154226  3.513086
##tbats
oralb_tbats<-tbats(ts_oralb_train)
oralb_tbats_fc<-forecast(oralb_tbats,20)
autoplot(oralb_tbats_fc)

accuracy(oralb_tbats_fc,ts_oralb_test)
##                       ME      RMSE       MAE        MPE      MAPE      MASE
## Training set    6.939308  97.43357  52.82237  -30.20269  59.03135 0.9116929
## Test set     -154.875983 157.81007 154.87598 -241.36377 241.36377 2.6730973
##                     ACF1 Theil's U
## Training set -0.01075659        NA
## Test set      0.40785154  5.587261
##linear regression
autoplot(ts_oralb)

# Drop the constant id column (destructive reassignment — safe only when the
# script runs once from a fresh session).
oralb<-oralb[,-"product_content_id"]
train.oralb<-oralb[280:378,]
# NOTE(review): the regression test set is rows 379:388 — only 10 days —
# while ts_oralb_test above covers 379:398 (20 days). The regression models
# are therefore evaluated on half the horizon used for the time-series
# models, so their accuracy tables below are not directly comparable.
# Confirm whether 388 should be 398.
test.oralb<-oralb[379:388,]
oralb_linear<-lm(sold_count~.,data=train.oralb)
oralb.predict<-predict(oralb_linear,test.oralb)
accuracy(oralb.predict,test.oralb$sold_count)
##                 ME     RMSE      MAE       MPE     MAPE
## Test set -64.89802 67.68503 64.89802 -74.82121 74.82121
plot(oralb.predict)

##linear according to correlation
# Correlation heatmap to select predictors; event_date skipped (non-numeric).
ggcorr(train.oralb,label=TRUE)
## Warning in ggcorr(train.oralb, label = TRUE): data in column(s) 'event_date' are
## not numeric and were ignored

# Reduced model on the most correlated predictors; improves test RMSE
# from 67.7 to 52.7 on the 10-day window.
oralb_linear_corr<-lm(sold_count~basket_count+favored_count+visit_count+
                        category_visits,train.oralb)
oralb.predict.corr<-predict(oralb_linear_corr,test.oralb)
accuracy(oralb.predict.corr,test.oralb$sold_count)
##                 ME     RMSE      MAE       MPE     MAPE
## Test set -47.97082 52.73447 47.97082 -57.36166 57.36166
plot(oralb.predict.corr,type="l",xlab="Days",ylab="Oral-B Toothbrush")

####Koton Mont
##Building test and train sets
# Train on 300-395, hold out 396-415 (20 points).
ts_koton_train<-window(ts_koton,start=300,end=395)
ts_koton_test<-window(ts_koton,start=396,end=415)

##naive
# Naive benchmark. The last training value is 0, so all point forecasts are 0.
koton_naive<-naive(ts_koton_train,20)
summary(koton_naive)
## 
## Forecast method: Naive method
## 
## Model Information:
## Call: naive(y = ts_koton_train, h = 20) 
## 
## Residual sd: 0.2526 
## 
## Error measures:
##              ME      RMSE        MAE  MPE MAPE MASE ACF1
## Training set  0 0.2513123 0.06315789 -Inf  Inf    1 -0.5
## 
## Forecasts:
##     Point Forecast      Lo 80     Hi 80      Lo 95     Hi 95
## 396              0 -0.3220697 0.3220697 -0.4925631 0.4925631
## 397              0 -0.4554754 0.4554754 -0.6965895 0.6965895
## 398              0 -0.5578411 0.5578411 -0.8531444 0.8531444
## 399              0 -0.6441395 0.6441395 -0.9851263 0.9851263
## 400              0 -0.7201698 0.7201698 -1.1014047 1.1014047
## 401              0 -0.7889065 0.7889065 -1.2065284 1.2065284
## 402              0 -0.8521164 0.8521164 -1.3031996 1.3031996
## 403              0 -0.9109508 0.9109508 -1.3931790 1.3931790
## 404              0 -0.9662092 0.9662092 -1.4776894 1.4776894
## 405              0 -1.0184739 1.0184739 -1.5576214 1.5576214
## 406              0 -1.0681844 1.0681844 -1.6336471 1.6336471
## 407              0 -1.1156823 1.1156823 -1.7062888 1.7062888
## 408              0 -1.1612389 1.1612389 -1.7759617 1.7759617
## 409              0 -1.2050746 1.2050746 -1.8430025 1.8430025
## 410              0 -1.2473707 1.2473707 -1.9076889 1.9076889
## 411              0 -1.2882789 1.2882789 -1.9702526 1.9702526
## 412              0 -1.3279275 1.3279275 -2.0308899 2.0308899
## 413              0 -1.3664261 1.3664261 -2.0897684 2.0897684
## 414              0 -1.4038694 1.4038694 -2.1470330 2.1470330
## 415              0 -1.4403396 1.4403396 -2.2028094 2.2028094
# Test-set errors are all 0/NaN: the hold-out apparently contains only zero
# sales, so the constant-zero naive forecast is trivially perfect.
# NOTE(review): with a degenerate hold-out like this, no model comparison is
# meaningful for Koton Mont — consider a longer/other evaluation window.
accuracy(koton_naive,ts_koton_test)
##              ME      RMSE        MAE  MPE MAPE MASE ACF1 Theil's U
## Training set  0 0.2513123 0.06315789 -Inf  Inf    1 -0.5        NA
## Test set      0 0.0000000 0.00000000  NaN  NaN    0  NaN       NaN
####Trendyol bikini
##Building test and train sets
# Train on 300-395, hold out 396-415 (20 points).
ts_trend_bikini_train<-window(ts_trend_bikini,start=300,end=395)
ts_trend_bikini_test<-window(ts_trend_bikini,start=396,end=415)

##holt
# Holt's linear-trend method, 20 steps ahead.
trend_bikini_holt<-holt(ts_trend_bikini_train,20)
# Inf/NaN percentage errors indicate zeros in this low-volume series;
# compare models by RMSE/MAE/MASE instead.
accuracy(trend_bikini_holt,ts_trend_bikini_test)
##                      ME      RMSE       MAE  MPE MAPE     MASE         ACF1
## Training set 0.01615299 0.5805658 0.4359376  NaN  Inf 0.900306 -0.001865744
## Test set     0.07684537 0.8235117 0.6864552 -Inf  Inf 1.417679 -0.048783309
##              Theil's U
## Training set        NA
## Test set             0
##mean
# Mean benchmark: constant forecast of the training average (0.333 units/day).
trend_bikini_mean<-meanf(ts_trend_bikini_train,20)
summary(trend_bikini_mean)
## 
## Forecast method: Mean
## 
## Model Information:
## $mu
## [1] 0.3333333
## 
## $mu.se
## [1] 0.06045635
## 
## $sd
## [1] 0.5923489
## 
## $bootstrap
## [1] FALSE
## 
## $call
## meanf(y = ts_trend_bikini_train, h = 20)
## 
## attr(,"class")
## [1] "meanf"
## 
## Error measures:
##                        ME      RMSE       MAE  MPE MAPE     MASE       ACF1
## Training set 3.351145e-17 0.5892557 0.4861111 -Inf  Inf 1.003925 0.06666667
## 
## Forecasts:
##     Point Forecast      Lo 80    Hi 80      Lo 95    Hi 95
## 396      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 397      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 398      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 399      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 400      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 401      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 402      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 403      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 404      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 405      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 406      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 407      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 408      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 409      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 410      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 411      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 412      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 413      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 414      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
## 415      0.3333333 -0.4350798 1.101746 -0.8487368 1.515403
accuracy(trend_bikini_mean,ts_trend_bikini_test)
##                        ME      RMSE       MAE  MPE MAPE     MASE        ACF1
## Training set 3.351145e-17 0.5892557 0.4861111 -Inf  Inf 1.003925  0.06666667
## Test set     4.166667e-01 0.9279607 0.7166667 -Inf  Inf 1.480072 -0.04090909
##              Theil's U
## Training set        NA
## Test set             0
##seasonal naive
# Seasonal naive benchmark: repeat the value from the same period one season
# earlier; here the relevant past values are 0, so all point forecasts are 0.
trend_bikini_snaive<-snaive(ts_trend_bikini_train,20)
summary(trend_bikini_snaive)
## 
## Forecast method: Seasonal naive method
## 
## Model Information:
## Call: snaive(y = ts_trend_bikini_train, h = 20) 
## 
## Residual sd: 0.8121 
## 
## Error measures:
##              ME      RMSE       MAE  MPE MAPE MASE       ACF1
## Training set  0 0.8078562 0.4842105 -Inf  Inf    1 -0.4677419
## 
## Forecasts:
##     Point Forecast     Lo 80    Hi 80     Lo 95    Hi 95
## 396              0 -1.035309 1.035309 -1.583369 1.583369
## 397              0 -1.464148 1.464148 -2.239222 2.239222
## 398              0 -1.793208 1.793208 -2.742476 2.742476
## 399              0 -2.070619 2.070619 -3.166738 3.166738
## 400              0 -2.315022 2.315022 -3.540521 3.540521
## 401              0 -2.535980 2.535980 -3.878446 3.878446
## 402              0 -2.739171 2.739171 -4.189201 4.189201
## 403              0 -2.928297 2.928297 -4.478444 4.478444
## 404              0 -3.105928 3.105928 -4.750107 4.750107
## 405              0 -3.273936 3.273936 -5.007052 5.007052
## 406              0 -3.433733 3.433733 -5.251441 5.251441
## 407              0 -3.586417 3.586417 -5.484951 5.484951
## 408              0 -3.732861 3.732861 -5.708918 5.708918
## 409              0 -3.873773 3.873773 -5.924424 5.924424
## 410              0 -4.009736 4.009736 -6.132362 6.132362
## 411              0 -4.141237 4.141237 -6.333476 6.333476
## 412              0 -4.268690 4.268690 -6.528398 6.528398
## 413              0 -4.392445 4.392445 -6.717666 6.717666
## 414              0 -4.512809 4.512809 -6.901745 6.901745
## 415              0 -4.630044 4.630044 -7.081041 7.081041
accuracy(trend_bikini_snaive,ts_trend_bikini_test)
##                ME      RMSE       MAE  MPE MAPE     MASE        ACF1 Theil's U
## Training set 0.00 0.8078562 0.4842105 -Inf  Inf 1.000000 -0.46774194        NA
## Test set     0.75 1.1180340 0.7500000  100  100 1.548913 -0.04090909         0
autoplot(trend_bikini_snaive)

##linear regression
# Drop the constant id column (destructive reassignment — safe only when the
# script runs once from a fresh session).
trend_bikini<-trend_bikini[,-"product_content_id"]
# Row split matching the ts windows above: 300:395 train, 396:415 test (20 rows).
train.trend_bikini<-trend_bikini[300:395,]
test.trend_bikini<-trend_bikini[396:415,]
# Full regression of sold_count on all remaining columns.
trend_bikini_linear<-lm(sold_count~.,data=train.trend_bikini)
trend_bikini.predict<-predict(trend_bikini_linear,test.trend_bikini)
accuracy(trend_bikini.predict,test.trend_bikini$sold_count)
##                 ME      RMSE      MAE MPE MAPE
## Test set 0.1666973 0.6481107 0.375193 NaN  Inf
plot(trend_bikini.predict)

##linear according to correlation
# Correlation heatmap for predictor selection; event_date skipped (non-numeric).
ggcorr(train.trend_bikini,label=TRUE)
## Warning in ggcorr(train.trend_bikini, label = TRUE): data in column(s)
## 'event_date' are not numeric and were ignored

# Single-predictor model on price; best test RMSE for this series (0.477).
trend_bikini_linear_corr<-lm(sold_count~price,data=train.trend_bikini)
trend_bikini.predict.corr<-predict(trend_bikini_linear_corr,test.trend_bikini)
accuracy(trend_bikini.predict.corr,test.trend_bikini$sold_count)
##                   ME      RMSE       MAE  MPE MAPE
## Test set 0.009608758 0.4770074 0.2877316 -Inf  Inf